/*
 * Copyright (c) 2018 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder.  You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Matt Horsnell
 *          Prakash Ramrakhyani
 */

#ifndef __ARCH_ARM_INSTS_CRYPTO_HH__
#define __ARCH_ARM_INSTS_CRYPTO_HH__

namespace ArmISA {

class Crypto
{
    /** Selects which SHA-1 round function sha1Op()/_sha1Op() apply. */
    enum SHAOp : uint8_t
    {
        CHOOSE = 0,
        PARITY,
        MAJORITY
    };

    /** Lookup table for the AES subBytes transformation. */
    static const uint8_t aesSBOX[256];

    /** Lookup table for the inverse subBytes transformation. */
    static const uint8_t aesInvSBOX[256];

    /** Byte index tables used by the shiftRows and invShiftRows steps. */
    static const uint8_t aesSHIFT[16];
    static const uint8_t aesINVSHIFT[16];

    /**
     * Lookup table for the finite field logarithm where the base
     * is the element {03} in the field GF(2^8).
     */
    static const uint8_t aesFFLOG[256];

    /**
     * Lookup table for {03}^X where {03} and X are elements
     * in the field GF(2^8).
     */
    static const uint8_t aesFFEXP[256];

    /** Finite field multiplication of two elements in the field GF(2^8). */
    uint8_t aesFFMul(uint8_t a, uint8_t b);
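
    /*
     * Note: with the log/exp tables above, the product of two non-zero
     * field elements can be computed as
     *     aesFFEXP[(aesFFLOG[a] + aesFFLOG[b]) % 255]
     * (a zero operand gives a zero product). This is only an illustration
     * of how the tables relate to aesFFMul(); the actual implementation
     * lives in the corresponding .cc file.
     */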

    /**
     * Multiplication by {02} ("xtime") in GF(2^8): a left shift, with a
     * reduction by the AES polynomial x^8 + x^4 + x^3 + x + 1 (XOR with
     * 0x1b) whenever the top bit overflows.
     */
    uint8_t aesFFMul2(uint8_t a)
    {
        return ((a & 0x80) ? ((a << 1) ^ 0x1b) : (a << 1));
    }
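    // For reference, aesFFMul2(0x57) == 0xae and aesFFMul2(0x80) == 0x1b,
    // matching the {57} * {02} = {ae} xtime example in FIPS-197.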

    void aesSubBytes(uint8_t *output, uint8_t *input);
    void aesInvSubBytes(uint8_t *output, uint8_t *input);
    void aesShiftRows(uint8_t *output, uint8_t *input);
    void aesInvShiftRows(uint8_t *output, uint8_t *input);
    void aesAddRoundKey(uint8_t *output, uint8_t *input, uint8_t *key);
    /** Rotate x right by shift bits (valid for 0 < shift < 32). */
    uint32_t ror(uint32_t x, uint8_t shift)
    {
        return (x >> shift) | (x << (32 - shift));
    }

    /*
     * The SHA logical functions Ch (choose), Parity and Maj (majority)
     * as defined in FIPS 180-4.
     */
    uint32_t choose(uint32_t X, uint32_t Y, uint32_t Z)
    {
        return (((Y ^ Z) & X) ^ Z);
    }

    uint32_t parity(uint32_t X, uint32_t Y, uint32_t Z)
    {
        return (X ^ Y ^ Z);
    }

    uint32_t majority(uint32_t X, uint32_t Y, uint32_t Z)
    {
        return ((X & Y) | ((X | Y) & Z));
    }

    /** The SHA-256 big-sigma function Sigma0 (rotations by 2, 13, 22). */
    uint32_t sigma0(uint32_t X)
    {
        return ror(X,2) ^ ror(X,13) ^ ror(X,22);
    }

    /** The SHA-256 big-sigma function Sigma1 (rotations by 6, 11, 25). */
    uint32_t sigma1(uint32_t X)
    {
        return ror(X,6) ^ ror(X,11) ^ ror(X,25);
    }

    void sha256Op(uint32_t *X, uint32_t *Y, uint32_t *Z);
    void sha1Op(uint8_t *output, uint8_t *input, uint8_t *input2, SHAOp op);
    void _sha1Op(uint32_t *X, uint32_t *Y, uint32_t *Z, SHAOp op);

    /** Helpers for moving 128-bit operands between byte and word views. */
    void load2Reg(uint32_t *X, uint32_t *Y, uint8_t *output, uint8_t *input);
    void load3Reg(uint32_t *X, uint32_t *Y, uint32_t *Z,
                  uint8_t *output, uint8_t *input, uint8_t *input2);
    void store1Reg(uint8_t *output, uint32_t *X);

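    /*
     * The public entry points below operate on 16-byte (128-bit) operands
     * passed as byte arrays; a typical flow is to unpack them with
     * load2Reg/load3Reg, apply the step functions above, and repack the
     * result with store1Reg (see the .cc file for the actual definitions).
     */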
  public:
    /**
     * AES helpers for the Arm crypto-extension AESMC, AESIMC, AESE and
     * AESD instructions.
     */
    void aesMixColumns(uint8_t *output, uint8_t *input);
    void aesInvMixColumns(uint8_t *output, uint8_t *input);
    void aesEncrypt(uint8_t *output, uint8_t *input, uint8_t *key);
    void aesDecrypt(uint8_t *output, uint8_t *input, uint8_t *key);

    /**
     * SHA-256 helpers for the SHA256H, SHA256H2, SHA256SU0 and SHA256SU1
     * instructions.
     */
    void sha256H(uint8_t *output, uint8_t *input, uint8_t *input2);
    void sha256H2(uint8_t *output, uint8_t *input, uint8_t *input2);
    void sha256Su0(uint8_t *output, uint8_t *input);
    void sha256Su1(uint8_t *output, uint8_t *input, uint8_t *input2);

    /**
     * SHA-1 helpers for the SHA1C, SHA1P, SHA1M, SHA1H, SHA1SU0 and
     * SHA1SU1 instructions.
     */
    void sha1C(uint8_t *output, uint8_t *input, uint8_t *input2);
    void sha1P(uint8_t *output, uint8_t *input, uint8_t *input2);
    void sha1M(uint8_t *output, uint8_t *input, uint8_t *input2);
    void sha1H(uint8_t *output, uint8_t *input);
    void sha1Su0(uint8_t *output, uint8_t *input, uint8_t *input2);
    void sha1Su1(uint8_t *output, uint8_t *input);
};
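
/*
 * Usage sketch (hypothetical caller, for illustration only): an instruction
 * implementation passes 16-byte views of its source and destination
 * vector registers, e.g.
 *
 *     Crypto crypto;
 *     uint8_t dest[16], op1[16], op2[16];
 *     // ... fill dest/op1/op2 from the operand registers ...
 *     crypto.sha256H(dest, op1, op2);  // dest receives the updated state
 */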

} // namespace ArmISA

#endif // __ARCH_ARM_INSTS_CRYPTO_HH__