LCOV - code coverage report

Current view: top level - builds/gnutls/coverage/gnutls-git/lib/accelerated/x86 - aes-gcm-x86-pclmul.c (source / functions)
Test:     GnuTLS-3.6.14 Code Coverage
Date:     2020-10-30 04:50:48
Coverage: Lines 0 / 123 (0.0 %), Functions 0 / 10 (0.0 %)

Source code
/*
 * Copyright (C) 2011-2012 Free Software Foundation, Inc.
 * Copyright (C) 2018 Red Hat, Inc.
 *
 * Author: Nikos Mavrogiannopoulos
 *
 * This file is part of GnuTLS.
 *
 * The GnuTLS is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <https://www.gnu.org/licenses/>
 *
 */

/*
 * The following code is an implementation of the AES-GCM cipher
 * (with 128-, 192- and 256-bit keys) using Intel's AES-NI and
 * PCLMULQDQ (carry-less multiplication) instruction sets.
 */

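/*
 * Overview of how the pieces below fit together (an editorial summary of
 * this file, in NIST SP 800-38D terminology):
 *
 *   - aes_gcm_cipher_setkey  expands the AES key with AES-NI and derives
 *         the GHASH subkey H = E_K(0^128) plus its precomputed table.
 *   - aes_gcm_setiv          takes a 96-bit nonce and forms the initial
 *         counter block Y_0 = IV || 0^31 || 1; E_K(Y_0) is kept to mask
 *         the final tag.
 *   - aes_gcm_auth           absorbs the additional authenticated data
 *         (AAD) into GHASH.
 *   - aes_gcm_encrypt/decrypt run AES in CTR mode over the payload and
 *         absorb the ciphertext into GHASH.
 *   - aes_gcm_tag            folds in the bit lengths of AAD and
 *         ciphertext and XORs the result with E_K(Y_0) to produce the
 *         authentication tag.
 */
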
#include "errors.h"
#include "gnutls_int.h"
#include <gnutls/crypto.h>
#include <aes-x86.h>
#include <x86-common.h>
#include <nettle/memxor.h>
#include <byteswap.h>

#define GCM_BLOCK_SIZE 16

/* GCM mode */

typedef struct {
        uint64_t hi, lo;
} u128;

/* This is the gcm128 structure used in OpenSSL. It
 * is compatible with the included assembly code.
 *
 * Yi:  the current counter block; EKi: E_K(Y_i) (not referenced directly
 * in this file); EK0: E_K(Y_0), used to mask the final tag; len: byte
 * counts of AAD (u[0]) and ciphertext (u[1]); Xi: the running GHASH
 * state; H: the GHASH subkey E_K(0^128); Htable: precomputed multiples
 * of H used by the PCLMUL assembly.
 */
struct gcm128_context {
        union {
                uint64_t u[2];
                uint32_t d[4];
                uint8_t c[16];
        } Yi, EKi, EK0, len, Xi, H;
        u128 Htable[16];
};

struct aes_gcm_ctx {
        AES_KEY expanded_key;           /* AES-NI round keys */
        struct gcm128_context gcm;      /* CTR + GHASH state */
        unsigned finished;              /* a partial data block was seen; no more data allowed */
        unsigned auth_finished;         /* a partial AAD block was seen; no more AAD allowed */
};

/* GHASH primitives, implemented in the accompanying PCLMULQDQ assembly. */
void gcm_init_clmul(u128 Htable[16], const uint64_t Xi[2]);
void gcm_ghash_clmul(uint64_t Xi[2], const u128 Htable[16],
                     const uint8_t * inp, size_t len);
void gcm_gmult_clmul(uint64_t Xi[2], const u128 Htable[16]);

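/*
 * For reference, the operation these routines accelerate, as defined in
 * the GCM specification (NIST SP 800-38D): GHASH works in GF(2^128)
 * with the subkey H.  Starting from X = 0, every 16-byte block B that
 * is authenticated (AAD, then ciphertext, then the length block)
 * updates the state as
 *
 *         X <- (X XOR B) * H        (multiplication in GF(2^128))
 *
 * gcm_gmult_clmul performs a single such multiplication of Xi by H,
 * while gcm_ghash_clmul XORs in and multiplies a whole run of complete
 * blocks; both use the PCLMULQDQ carry-less multiply instruction, and
 * gcm_init_clmul precomputes Htable from H for them.
 */
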
static void aes_gcm_deinit(void *_ctx)
{
        struct aes_gcm_ctx *ctx = _ctx;

        /* wipe the expanded key and GCM state before freeing */
        zeroize_temp_key(ctx, sizeof(*ctx));
        gnutls_free(ctx);
}

static int
aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx,
                    int enc)
{
        /* all three variants share this implementation; the key size
         * passed to setkey later distinguishes between them */
        if (algorithm != GNUTLS_CIPHER_AES_128_GCM &&
            algorithm != GNUTLS_CIPHER_AES_192_GCM &&
            algorithm != GNUTLS_CIPHER_AES_256_GCM)
                return GNUTLS_E_INVALID_REQUEST;

        *_ctx = gnutls_calloc(1, sizeof(struct aes_gcm_ctx));
        if (*_ctx == NULL) {
                gnutls_assert();
                return GNUTLS_E_MEMORY_ERROR;
        }

        return 0;
}

static int
aes_gcm_cipher_setkey(void *_ctx, const void *userkey, size_t keysize)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int ret;

        CHECK_AES_KEYSIZE(keysize);

        ret =
            aesni_set_encrypt_key(userkey, keysize * 8,
                                  ALIGN16(&ctx->expanded_key));
        if (ret != 0)
                return gnutls_assert_val(GNUTLS_E_ENCRYPTION_FAILED);

        /* derive the GHASH subkey: the context is zero-initialized, so
         * encrypting H.c in place yields H = E_K(0^128) */
        aesni_ecb_encrypt(ctx->gcm.H.c, ctx->gcm.H.c,
                          GCM_BLOCK_SIZE, ALIGN16(&ctx->expanded_key), 1);

        /* byte-swap H into the 64-bit word representation expected by
         * gcm_init_clmul and precompute Htable */
        ctx->gcm.H.u[0] = bswap_64(ctx->gcm.H.u[0]);
        ctx->gcm.H.u[1] = bswap_64(ctx->gcm.H.u[1]);

        gcm_init_clmul(ctx->gcm.Htable, ctx->gcm.H.u);

        return 0;
}

static int aes_gcm_setiv(void *_ctx, const void *iv, size_t iv_size)
{
        struct aes_gcm_ctx *ctx = _ctx;

        /* only the standard 96-bit (12-byte) GCM nonce is supported */
        if (iv_size != GCM_BLOCK_SIZE - 4)
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        /* reset the GHASH state and length counters for the new message */
        memset(ctx->gcm.Xi.c, 0, sizeof(ctx->gcm.Xi.c));
        memset(ctx->gcm.len.c, 0, sizeof(ctx->gcm.len.c));

        /* Y_0 = IV || 0^31 || 1 */
        memcpy(ctx->gcm.Yi.c, iv, GCM_BLOCK_SIZE - 4);
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 4] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 3] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 2] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 1;

        /* EK0 = E_K(Y_0) is kept to mask the tag; the counter then
         * starts at 2 for the first payload block */
        aesni_ecb_encrypt(ctx->gcm.Yi.c, ctx->gcm.EK0.c,
                          GCM_BLOCK_SIZE, ALIGN16(&ctx->expanded_key), 1);
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 2;
        ctx->finished = 0;
        ctx->auth_finished = 0;
        return 0;
}

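/*
 * Illustration of the counter block laid out above (the nonce value is
 * an example, not taken from the original sources): with the 12-byte
 * nonce 000102030405060708090a0b, Yi becomes
 *
 *         00 01 02 03 04 05 06 07 08 09 0a 0b | 00 00 00 01   (= Y_0)
 *
 * EK0 = E_K(Y_0) is saved for the tag computation, after which the
 * trailing 32-bit counter is set to 2, so the first keystream block
 * used for the payload is E_K(inc32(Y_0)).
 */
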
static void
gcm_ghash(struct aes_gcm_ctx *ctx, const uint8_t * src, size_t src_size)
{
        size_t rest = src_size % GCM_BLOCK_SIZE;
        size_t aligned_size = src_size - rest;

        /* process all complete 16-byte blocks in one assembly call */
        if (aligned_size > 0)
                gcm_ghash_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable, src,
                                aligned_size);

        /* a trailing partial block is implicitly zero-padded: XOR it
         * into Xi and multiply by H once */
        if (rest > 0) {
                memxor(ctx->gcm.Xi.c, src + aligned_size, rest);
                gcm_gmult_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable);
        }
}

static inline void
ctr_encrypt_last(struct aes_gcm_ctx *ctx, const uint8_t * src,
                 uint8_t * dst, size_t pos, size_t length)
{
        uint8_t tmp[GCM_BLOCK_SIZE];
        uint8_t out[GCM_BLOCK_SIZE];

        /* copy the trailing partial block into a full-sized temporary
         * buffer so the CTR routine never reads or writes past the
         * caller's data; only 'length' output bytes are copied back */
        memcpy(tmp, &src[pos], length);
        aesni_ctr32_encrypt_blocks(tmp, out, 1,
                                   ALIGN16(&ctx->expanded_key),
                                   ctx->gcm.Yi.c);

        memcpy(&dst[pos], out, length);
}

static int
aes_gcm_encrypt(void *_ctx, const void *src, size_t src_size,
                void *dst, size_t length)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int blocks = src_size / GCM_BLOCK_SIZE;
        int exp_blocks = blocks * GCM_BLOCK_SIZE;
        int rest = src_size - (exp_blocks);
        uint32_t counter;

        if (unlikely(ctx->finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        /* CTR-encrypt the complete blocks, then advance the stored
         * 32-bit counter for the next call */
        if (blocks > 0) {
                aesni_ctr32_encrypt_blocks(src, dst,
                                           blocks,
                                           ALIGN16(&ctx->expanded_key),
                                           ctx->gcm.Yi.c);

                counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12);
                counter += blocks;
                _gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12);
        }

        if (rest > 0) {      /* last incomplete block */
                ctr_encrypt_last(ctx, src, dst, exp_blocks, rest);
                ctx->finished = 1;
        }

        /* authenticate the ciphertext and account for its length */
        gcm_ghash(ctx, dst, src_size);
        ctx->gcm.len.u[1] += src_size;

        return 0;
}

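/*
 * Restating what the function above does, not adding behaviour: block i
 * of the ciphertext is P_i XOR E_K(Y_i), where Y_i is the counter block
 * whose low 32 bits increment per block.  The caller is responsible for
 * advancing the stored counter, so after the bulk call the 32-bit
 * big-endian counter at Yi.c[12..15] is re-read, increased by the number
 * of blocks processed, and written back for the next call.  The same
 * bookkeeping appears in aes_gcm_decrypt below; the only difference is
 * that decryption feeds the ciphertext (src) to GHASH before generating
 * the keystream, since GHASH is always computed over ciphertext.
 */
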
static int
aes_gcm_decrypt(void *_ctx, const void *src, size_t src_size,
                void *dst, size_t dst_size)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int blocks = src_size / GCM_BLOCK_SIZE;
        int exp_blocks = blocks * GCM_BLOCK_SIZE;
        int rest = src_size - (exp_blocks);
        uint32_t counter;

        if (unlikely(ctx->finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        /* GHASH is computed over the ciphertext, so absorb the input
         * before decrypting it */
        gcm_ghash(ctx, src, src_size);
        ctx->gcm.len.u[1] += src_size;

        if (blocks > 0) {
                aesni_ctr32_encrypt_blocks(src, dst,
                                           blocks,
                                           ALIGN16(&ctx->expanded_key),
                                           ctx->gcm.Yi.c);

                counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12);
                counter += blocks;
                _gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12);
        }

        if (rest > 0) {      /* last incomplete block */
                ctr_encrypt_last(ctx, src, dst, exp_blocks, rest);
                ctx->finished = 1;
        }

        return 0;
}

static int aes_gcm_auth(void *_ctx, const void *src, size_t src_size)
{
        struct aes_gcm_ctx *ctx = _ctx;

        if (unlikely(ctx->auth_finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        /* absorb the additional authenticated data into GHASH */
        gcm_ghash(ctx, src, src_size);
        ctx->gcm.len.u[0] += src_size;

        /* a partial block can only be the last piece of AAD */
        if (src_size % GCM_BLOCK_SIZE != 0)
                ctx->auth_finished = 1;

        return 0;
}

static void aes_gcm_tag(void *_ctx, void *tag, size_t tagsize)
{
        struct aes_gcm_ctx *ctx = _ctx;
        uint8_t buffer[GCM_BLOCK_SIZE];
        uint64_t alen, clen;

        /* the final GHASH block carries the AAD and ciphertext lengths in bits */
        alen = ctx->gcm.len.u[0] * 8;
        clen = ctx->gcm.len.u[1] * 8;

        _gnutls_write_uint64(alen, buffer);
        _gnutls_write_uint64(clen, &buffer[8]);

        gcm_ghash_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable, buffer,
                        GCM_BLOCK_SIZE);

        /* mask with E_K(Y_0) and truncate to the requested tag size */
        ctx->gcm.Xi.u[0] ^= ctx->gcm.EK0.u[0];
        ctx->gcm.Xi.u[1] ^= ctx->gcm.EK0.u[1];

        memcpy(tag, ctx->gcm.Xi.c, MIN(GCM_BLOCK_SIZE, tagsize));
}

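/*
 * Summarising the computation above in the notation of the GCM
 * specification: after the 128-bit length block (the 64-bit AAD length
 * followed by the 64-bit ciphertext length, both in bits) is folded
 * into GHASH,
 *
 *         T = GHASH_H(A, C, len(A) || len(C)) XOR E_K(Y_0)
 *
 * and the tag written out is T truncated to the requested tagsize
 * (at most GCM_BLOCK_SIZE bytes).
 */
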
/* aes-gcm-aead.h provides the single-call AEAD wrappers
 * (aes_gcm_aead_encrypt/aes_gcm_aead_decrypt) on top of the
 * primitives above */
#include "aes-gcm-aead.h"

const gnutls_crypto_cipher_st _gnutls_aes_gcm_pclmul = {
        .init = aes_gcm_cipher_init,
        .setkey = aes_gcm_cipher_setkey,
        .setiv = aes_gcm_setiv,
        .aead_encrypt = aes_gcm_aead_encrypt,
        .aead_decrypt = aes_gcm_aead_decrypt,
        .encrypt = aes_gcm_encrypt,
        .decrypt = aes_gcm_decrypt,
        .deinit = aes_gcm_deinit,
        .tag = aes_gcm_tag,
        .auth = aes_gcm_auth,
};
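
/*
 * A minimal usage sketch of the low-level entry points above (an
 * editorial example, not part of the original file): the expected call
 * sequence is init -> setkey -> setiv -> auth (all AAD) -> encrypt ->
 * tag -> deinit.  The key, nonce and buffer contents below are
 * illustrative placeholders.
 */
static int example_encrypt_one_record(void)
{
        uint8_t key[16] = { 0 };        /* placeholder 128-bit key */
        uint8_t nonce[12] = { 0 };      /* this backend requires a 96-bit nonce */
        uint8_t aad[13] = { 0 };        /* e.g. a TLS record header */
        uint8_t plain[64] = { 0 };
        uint8_t cipher[64];
        uint8_t tag[16];
        void *ctx;
        int ret;

        ret = aes_gcm_cipher_init(GNUTLS_CIPHER_AES_128_GCM, &ctx, 1);
        if (ret < 0)
                return ret;

        ret = aes_gcm_cipher_setkey(ctx, key, sizeof(key));
        if (ret < 0)
                goto cleanup;

        ret = aes_gcm_setiv(ctx, nonce, sizeof(nonce));
        if (ret < 0)
                goto cleanup;

        /* all AAD must be supplied before the payload */
        ret = aes_gcm_auth(ctx, aad, sizeof(aad));
        if (ret < 0)
                goto cleanup;

        ret = aes_gcm_encrypt(ctx, plain, sizeof(plain), cipher, sizeof(cipher));
        if (ret < 0)
                goto cleanup;

        /* emit the 16-byte authentication tag */
        aes_gcm_tag(ctx, tag, sizeof(tag));

 cleanup:
        aes_gcm_deinit(ctx);
        return ret;
}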

Generated by: LCOV version 1.14