@@ -56,16 +56,6 @@ struct aegis_ctx {
56
56
struct aegis_block key ;
57
57
};
58
58
59
- struct aegis_crypt_ops {
60
- int (* skcipher_walk_init )(struct skcipher_walk * walk ,
61
- struct aead_request * req , bool atomic );
62
-
63
- void (* crypt_blocks )(void * state , unsigned int length , const void * src ,
64
- void * dst );
65
- void (* crypt_tail )(void * state , unsigned int length , const void * src ,
66
- void * dst );
67
- };
68
-
69
59
static void crypto_aegis128_aesni_process_ad (
70
60
struct aegis_state * state , struct scatterlist * sg_src ,
71
61
unsigned int assoclen )
@@ -114,20 +104,37 @@ static void crypto_aegis128_aesni_process_ad(
114
104
}
115
105
}
116
106
117
/*
 * Walk the src/dst scatterlists and encrypt or decrypt the payload with the
 * AES-NI AEGIS-128 assembly routines.
 *
 * @state: expanded AEGIS-128 state (already initialized; FPU context held)
 * @walk:  skcipher walk previously started on the request
 * @enc:   true = encrypt, false = decrypt
 *
 * __always_inline so each caller passes a compile-time-constant @enc and the
 * branches below fold away, avoiding the indirect calls the old
 * aegis_crypt_ops table required (indirect calls are costly under retpolines).
 *
 * NOTE(review): caller must hold kernel FPU context (kernel_fpu_begin()) —
 * the *_enc/*_dec routines are SSE/AES-NI assembly; confirm against callers.
 */
static __always_inline void
crypto_aegis128_aesni_process_crypt(struct aegis_state *state,
				    struct skcipher_walk *walk, bool enc)
{
	/* Process all whole blocks available in the current walk chunk. */
	while (walk->nbytes >= AEGIS128_BLOCK_SIZE) {
		if (enc)
			crypto_aegis128_aesni_enc(
					state,
					round_down(walk->nbytes,
						   AEGIS128_BLOCK_SIZE),
					walk->src.virt.addr,
					walk->dst.virt.addr);
		else
			crypto_aegis128_aesni_dec(
					state,
					round_down(walk->nbytes,
						   AEGIS128_BLOCK_SIZE),
					walk->src.virt.addr,
					walk->dst.virt.addr);
		/* Report the sub-block remainder so the walk re-presents it. */
		skcipher_walk_done(walk, walk->nbytes % AEGIS128_BLOCK_SIZE);
	}

	/* Final partial block, if any, goes through the tail routines. */
	if (walk->nbytes) {
		if (enc)
			crypto_aegis128_aesni_enc_tail(state, walk->nbytes,
						       walk->src.virt.addr,
						       walk->dst.virt.addr);
		else
			crypto_aegis128_aesni_dec_tail(state, walk->nbytes,
						       walk->src.virt.addr,
						       walk->dst.virt.addr);
		skcipher_walk_done(walk, 0);
	}
}
@@ -162,42 +169,39 @@ static int crypto_aegis128_aesni_setauthsize(struct crypto_aead *tfm,
162
169
return 0 ;
163
170
}
164
171
165
/*
 * Common AEGIS-128 AEAD en/decryption path.
 *
 * @req:      the AEAD request (AD + payload in req->src, IV in req->iv)
 * @tag_xor:  in/out authentication tag block; crypto_aegis128_aesni_final()
 *            folds the computed tag into it
 * @cryptlen: payload length in bytes (excludes the authentication tag)
 * @enc:      true = encrypt, false = decrypt
 *
 * __always_inline with constant @enc so both branches here — and the ones in
 * crypto_aegis128_aesni_process_crypt() — are resolved at compile time,
 * replacing the former aegis_crypt_ops indirect-call table.
 */
static __always_inline void
crypto_aegis128_aesni_crypt(struct aead_request *req,
			    struct aegis_block *tag_xor,
			    unsigned int cryptlen, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	/* Start the walk before taking the FPU; atomic=true: no sleeping. */
	if (enc)
		skcipher_walk_aead_encrypt(&walk, req, true);
	else
		skcipher_walk_aead_decrypt(&walk, req, true);

	/* The assembly routines below use SSE/AES-NI registers. */
	kernel_fpu_begin();

	crypto_aegis128_aesni_init(&state, ctx->key.bytes, req->iv);
	crypto_aegis128_aesni_process_ad(&state, req->src, req->assoclen);
	crypto_aegis128_aesni_process_crypt(&state, &walk, enc);
	crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);

	kernel_fpu_end();
}
186
196
187
197
static int crypto_aegis128_aesni_encrypt (struct aead_request * req )
188
198
{
189
- static const struct aegis_crypt_ops OPS = {
190
- .skcipher_walk_init = skcipher_walk_aead_encrypt ,
191
- .crypt_blocks = crypto_aegis128_aesni_enc ,
192
- .crypt_tail = crypto_aegis128_aesni_enc_tail ,
193
- };
194
-
195
199
struct crypto_aead * tfm = crypto_aead_reqtfm (req );
196
200
struct aegis_block tag = {};
197
201
unsigned int authsize = crypto_aead_authsize (tfm );
198
202
unsigned int cryptlen = req -> cryptlen ;
199
203
200
- crypto_aegis128_aesni_crypt (req , & tag , cryptlen , & OPS );
204
+ crypto_aegis128_aesni_crypt (req , & tag , cryptlen , true );
201
205
202
206
scatterwalk_map_and_copy (tag .bytes , req -> dst ,
203
207
req -> assoclen + cryptlen , authsize , 1 );
@@ -208,12 +212,6 @@ static int crypto_aegis128_aesni_decrypt(struct aead_request *req)
208
212
{
209
213
static const struct aegis_block zeros = {};
210
214
211
- static const struct aegis_crypt_ops OPS = {
212
- .skcipher_walk_init = skcipher_walk_aead_decrypt ,
213
- .crypt_blocks = crypto_aegis128_aesni_dec ,
214
- .crypt_tail = crypto_aegis128_aesni_dec_tail ,
215
- };
216
-
217
215
struct crypto_aead * tfm = crypto_aead_reqtfm (req );
218
216
struct aegis_block tag ;
219
217
unsigned int authsize = crypto_aead_authsize (tfm );
@@ -222,7 +220,7 @@ static int crypto_aegis128_aesni_decrypt(struct aead_request *req)
222
220
scatterwalk_map_and_copy (tag .bytes , req -> src ,
223
221
req -> assoclen + cryptlen , authsize , 0 );
224
222
225
- crypto_aegis128_aesni_crypt (req , & tag , cryptlen , & OPS );
223
+ crypto_aegis128_aesni_crypt (req , & tag , cryptlen , false );
226
224
227
225
return crypto_memneq (tag .bytes , zeros .bytes , authsize ) ? - EBADMSG : 0 ;
228
226
}
0 commit comments