1 /***************************************************************************
2 * The yarrow pseudo-randomness generator *
3 * extracted from nettle, the low-level cryptographic library *
5 * Copyright (C) 2007 Tarek Saidi <tarek.saidi@arcor.de> *
6 * Copyright (C) 2001 Niels Möller *
8 * This program is free software; you can redistribute it and/or modify *
9 * it under the terms of the GNU General Public License as published by *
10 * the Free Software Foundation; version 2 of the License. *
12 * This program is distributed in the hope that it will be useful, *
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
15 * GNU General Public License for more details. *
17 * You should have received a copy of the GNU General Public License *
18 * along with this program; if not, write to the *
19 * Free Software Foundation, Inc., *
20 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
21 ***************************************************************************/
32 #define YARROW_DEBUG 0
39 #define SHA256_DIGEST_SIZE 32
40 #define AES_MAX_KEY_SIZE 32
44 /* An upper limit on the entropy (in bits) credited per octet of
 * sample data. */
46 #define YARROW_MULTIPLIER 4
48 /* Entropy threshold for reseeding from the fast pool */
49 #define YARROW_FAST_THRESHOLD 100
51 /* Entropy threshold for reseeding from the slow pool */
52 #define YARROW_SLOW_THRESHOLD 160
54 /* Number of sources that must exceed the threshold for slow reseed */
55 #define YARROW_SLOW_K 2
57 /* The number of iterations when reseeding, P_t in the yarrow paper.
58 * Should be chosen so that reseeding takes on the order of 0.1-1
 * seconds. */
60 #define YARROW_RESEED_ITERATIONS 1500
62 /* Entropy estimates stick to this value; it is treated as infinity
63 * in calculations. It should fit comfortably in an uint32_t, to avoid
 * overflows. */
65 #define YARROW_MAX_ENTROPY 0x100000
67 /* Forward declarations.  The return types fall on lines elided from
 * this excerpt; in upstream nettle both are `static void`. */
70 yarrow_fast_reseed(struct yarrow256_ctx *ctx);
73 yarrow_gate(struct yarrow256_ctx *ctx);
76 yarrow256_init(struct yarrow256_ctx *ctx,
78 struct yarrow_source *s)
/* Initialize the generator: reset both SHA-256 entropy pools, zero the
 * seed-file buffer and the AES counter block, and start every attached
 * entropy source with zero entropy estimates, feeding the fast pool
 * first.  (The `nsources` parameter and the per-source loop header fall
 * on lines elided from this excerpt.) */
82 sha256_starts(&ctx->pools[0]);
83 sha256_starts(&ctx->pools[1]);
87 /* Not strictly necessary, but it makes it easier to see if the
 * generator is used before being seeded — TODO confirm wording against
 * the full source. */
89 memset(ctx->seed_file, 0, YARROW256_SEED_FILE_SIZE);
90 memset(ctx->counter, 0, sizeof(ctx->counter));
/* Per-source initial state: nothing collected yet, next sample goes to
 * the fast pool. */
97 ctx->sources[i].estimate[YARROW_FAST] = 0;
98 ctx->sources[i].estimate[YARROW_SLOW] = 0;
99 ctx->sources[i].next = YARROW_FAST;
104 yarrow256_seed(struct yarrow256_ctx *ctx,
106 const quint8 *seed_file)
/* Seed the generator from previously saved seed-file data: feed it into
 * the fast pool and immediately perform a fast reseed to derive the
 * first AES key.  (The `length` parameter and any sanity checks fall on
 * elided lines.) */
108 /* FIXME: Perhaps it's better to use assert ? */
112 sha256_update(&ctx->pools[YARROW_FAST], seed_file, length);
113 yarrow_fast_reseed(ctx);
118 /* FIXME: Generalize so that it generates a few more blocks at a
 * time, to save some copying. */
121 yarrow_generate_block(struct yarrow256_ctx *ctx,
/* Produce one AES block of output: encrypt the current counter into the
 * output block, then step the counter (CTR-style). */
125 //aes_encrypt(&ctx->key, sizeof(ctx->counter), block, ctx->counter);
126 aes_ecb_encrypt(ctx->counter,block,sizeof(ctx->counter),&ctx->key);
128 /* Increment counter, treating it as a big-endian number. This is
129 * machine independent, and follows appendix B of the NIST
130 * specification of cipher modes of operation.
132 * We could keep a representation of the counter as 4 32-bit values,
133 * and write entire words (in big-endian byteorder) into the counter
134 * block, whenever they change. */
135 for (i = sizeof(ctx->counter); i--; )
/* Stop as soon as an octet does not wrap to zero (no carry out); the
 * `break` statement falls on a line elided from this excerpt. */
137 if (++ctx->counter[i])
143 yarrow_iterate(quint8 *digest)
/* Iterated hashing applied at reseed time (function P_t in the Yarrow
 * paper) to make it expensive to reconstruct state: repeatedly replace
 * `digest` with SHA-256(v_i | v_0 | i). */
145 quint8 v0[SHA256_DIGEST_SIZE];
148 memcpy(v0, digest, SHA256_DIGEST_SIZE);
150 /* When hashed inside the loop, i should run from 1 to
151 * YARROW_RESEED_ITERATIONS */
/* NOTE(review): with this pre-increment test the body actually runs for
 * i = 1 .. YARROW_RESEED_ITERATIONS-1, one iteration fewer than the
 * comment above states.  This matches upstream nettle, so it is left
 * unchanged; verify against upstream before "fixing". */
152 for (i = 0; ++i < YARROW_RESEED_ITERATIONS; )
157 sha256_starts(&hash);
159 /* Hash v_i | v_0 | i */
160 WRITE_UINT32(count, i);
161 sha256_update(&hash, digest, SHA256_DIGEST_SIZE);
162 sha256_update(&hash, v0, sizeof(v0));
163 sha256_update(&hash, count, sizeof(count));
164 sha256_finish(&hash,digest);
168 /* NOTE: The SHA-256 digest size equals the AES key size, so we need
169 * no "size adaptor". */
172 yarrow_fast_reseed(struct yarrow256_ctx *ctx)
/* Fast reseed: mix two blocks of current generator output into the fast
 * pool, close the pool, harden its digest with yarrow_iterate(), install
 * the result as the new AES key, derive a fresh counter, reset fast-pool
 * entropy estimates, and regenerate the seed file from the new stream. */
174 quint8 digest[SHA256_DIGEST_SIZE];
/* Debug trace; upstream guards this with #if YARROW_DEBUG — the guard
 * lines are elided from this excerpt. */
178 fprintf(stderr, "yarrow_fast_reseed\n");
181 /* We feed two blocks of output using the current key into the pool
182 * before emptying it. */
185 quint8 blocks[AES_BLOCK_SIZE * 2];
187 yarrow_generate_block(ctx, blocks);
188 yarrow_generate_block(ctx, blocks + AES_BLOCK_SIZE);
189 sha256_update(&ctx->pools[YARROW_FAST],blocks,sizeof(blocks));
192 sha256_finish(&ctx->pools[YARROW_FAST],digest);
/* Make following the reseed computationally expensive for an attacker
 * who knows earlier state. */
195 yarrow_iterate(digest);
197 aes_encrypt_key256(digest,&ctx->key);
199 /* Derive new counter value */
/* counter = AES_newkey(0), encrypted in place. */
200 memset(ctx->counter, 0, sizeof(ctx->counter));
201 //aes_encrypt(&ctx->key, sizeof(ctx->counter), ctx->counter, ctx->counter);
202 aes_ecb_encrypt(ctx->counter,ctx->counter,sizeof(ctx->counter),&ctx->key);
204 /* Reset estimates. */
205 for (i = 0; i<ctx->nsources; i++)
206 ctx->sources[i].estimate[YARROW_FAST] = 0;
209 /* FIXME: Extract this into a function of its own. */
/* Refresh the seed-file material from the new key stream. */
210 for (i = 0; i < sizeof(ctx->seed_file); i+= AES_BLOCK_SIZE)
211 yarrow_generate_block(ctx, ctx->seed_file + i);
217 yarrow_slow_reseed(struct yarrow256_ctx *ctx)
/* Slow reseed: close the slow pool, feed its digest into the fast pool,
 * perform a fast reseed, then clear all slow-pool entropy estimates. */
219 quint8 digest[SHA256_DIGEST_SIZE];
/* Debug trace; upstream guards this with #if YARROW_DEBUG (guard lines
 * elided from this excerpt). */
223 fprintf(stderr, "yarrow_slow_reseed\n");
226 /* Get digest of the slow pool */
228 sha256_finish(&ctx->pools[YARROW_SLOW], digest);
230 /* Feed it into the fast pool */
231 sha256_update(&ctx->pools[YARROW_FAST],digest, sizeof(digest));
233 yarrow_fast_reseed(ctx);
235 /* Reset estimates. */
236 for (i = 0; i<ctx->nsources; i++)
237 ctx->sources[i].estimate[YARROW_SLOW] = 0;
241 yarrow256_update(struct yarrow256_ctx *ctx,
242 unsigned source_index, unsigned entropy,
243 unsigned length, const quint8 *data)
/* Mix one sample from an entropy source into the generator.  While the
 * generator is unseeded the sample goes to the slow pool; afterwards it
 * goes to whichever pool the source is due to feed next (each source
 * alternates between pools).  The caller's entropy claim is clamped
 * before being credited, and a fast or slow reseed is triggered when
 * the corresponding threshold is crossed. */
245 enum yarrow_pool_id current;
246 struct yarrow_source *source;
248 assert(source_index < ctx->nsources);
251 /* Nothing happens */
254 source = &ctx->sources[source_index];
257 /* While seeding, use the slow pool */
258 current = YARROW_SLOW;
261 current = source->next;
/* Alternate this source between the two pools. */
262 source->next = (yarrow_pool_id)!source->next;
265 sha256_update(&ctx->pools[current],data,length);
267 /* NOTE: We should be careful to avoid overflows in the estimates. */
268 if (source->estimate[current] < YARROW_MAX_ENTROPY)
270 if (entropy > YARROW_MAX_ENTROPY)
271 entropy = YARROW_MAX_ENTROPY;
/* Cap the claim at YARROW_MULTIPLIER bits per octet of sample data. */
273 if ( (length < (YARROW_MAX_ENTROPY / YARROW_MULTIPLIER))
274 && (entropy > YARROW_MULTIPLIER * length) )
275 entropy = YARROW_MULTIPLIER * length;
277 /* FIXME: Calling a more sophisticated estimator should be done
 * here. */
280 entropy += source->estimate[current];
281 if (entropy > YARROW_MAX_ENTROPY)
282 entropy = YARROW_MAX_ENTROPY;
284 source->estimate[current] = entropy;
287 /* Check for seed/reseed */
/* Debug trace; upstream wraps this fprintf in #if YARROW_DEBUG (guard
 * and fprintf(stderr, lines are elided from this excerpt). */
293 "yarrow256_update: source_index = %d,\n"
294 " fast pool estimate = %d\n",
295 source_index, source->estimate[YARROW_FAST]);
297 if (source->estimate[YARROW_FAST] >= YARROW_FAST_THRESHOLD)
299 yarrow_fast_reseed(ctx);
307 /* FIXME: This is somewhat inefficient. It would be better to
308 * either maintain the count, or do this loop only if the
309 * current source just crossed the threshold. */
/* Slow reseed once enough distinct sources pass the slow threshold. */
311 if (!yarrow256_needed_sources(ctx))
313 yarrow_slow_reseed(ctx);
327 yarrow_gate(struct yarrow256_ctx *ctx)
/* Generator gate: generate a fresh 256-bit key from the current stream
 * and install it, so a later state compromise cannot reveal earlier
 * output.  (The declaration of `i` falls on an elided line.) */
329 quint8 key[AES_MAX_KEY_SIZE];
332 for (i = 0; i < sizeof(key); i+= AES_BLOCK_SIZE)
333 yarrow_generate_block(ctx, key + i);
335 aes_encrypt_key256(key,&ctx->key);
339 yarrow256_random(struct yarrow256_ctx *ctx, unsigned length, quint8 *dst)
/* Fill `dst` with `length` pseudo-random octets.  Whole AES blocks are
 * written directly into dst; a trailing partial block is generated into
 * a stack buffer and copied.  (Any seeded-assertion or gating logic of
 * the full function falls on elided lines — TODO confirm.) */
343 while (length >= AES_BLOCK_SIZE)
345 yarrow_generate_block(ctx, dst);
346 dst += AES_BLOCK_SIZE;
347 length -= AES_BLOCK_SIZE;
/* Handle the final partial block, if any. */
351 quint8 buffer[AES_BLOCK_SIZE];
353 assert(length < AES_BLOCK_SIZE);
354 yarrow_generate_block(ctx, buffer);
355 memcpy(dst, buffer, length);
/* Query whether the generator has completed its initial seeding.  The
 * return type and the body fall on lines elided from this excerpt. */
361 yarrow256_is_seeded(struct yarrow256_ctx *ctx)
367 yarrow256_needed_sources(struct yarrow256_ctx *ctx)
/* Count the sources whose slow-pool estimate has reached the threshold
 * and return how many more are still needed before a slow reseed may
 * happen (0 means ready). */
369 /* FIXME: This is somewhat inefficient. It would be better to
370 * either maintain the count, or do this loop only if the
371 * current source just crossed the threshold. */
374 for (i = k = 0; i < ctx->nsources; i++)
375 if (ctx->sources[i].estimate[YARROW_SLOW] >= YARROW_SLOW_THRESHOLD)
/* Debug trace; upstream wraps this in #if YARROW_DEBUG (guard and
 * fprintf(stderr, lines elided).  NOTE(review): `source_index` and
 * `source` are not defined in this scope — this trace looks copied from
 * yarrow256_update; verify against the full source. */
380 "yarrow256_needed_sources: source_index = %d,\n"
381 " slow pool estimate = %d,\n"
382 " number of sources above threshold = %d\n",
383 source_index, source->estimate[YARROW_SLOW], k);
386 return (k < YARROW_SLOW_K) ? (YARROW_SLOW_K - k) : 0;
/* Public hook: force a full (slow) reseed regardless of the current
 * entropy estimates. */
390 yarrow256_force_reseed(struct yarrow256_ctx *ctx)
392 yarrow_slow_reseed(ctx);
/* Two independent generators: a convenience "weak" one seeded at
 * startup, and a "strong" one reserved for security-critical material. */
395 struct yarrow256_ctx WeakCtx;
396 struct yarrow256_ctx StrongCtx;
397 struct yarrow_source WeakSrc[2];
398 struct yarrow_source StrongSrc[2];
/* Module initialization (the enclosing function header and the buffer
 * declaration fall on elided lines): set up both generators with two
 * sources each and seed the weak one from system entropy. */
401 yarrow256_init(&WeakCtx,2,WeakSrc);
402 yarrow256_init(&StrongCtx,2,StrongSrc);
405 for (int i=0; i<2; i++){
/* Claims 8 bits/octet for 100 octets; yarrow256_update clamps this to
 * YARROW_MULTIPLIER (4) bits per octet. */
406 getEntropy(buffer,100);
407 yarrowUpdateWeak(i,100*8,100,buffer);
/* Feed one entropy sample into the weak generator (thin wrapper around
 * yarrow256_update on WeakCtx). */
411 void yarrowUpdateWeak(unsigned source, unsigned entropy, unsigned length, const quint8 *data){
412 yarrow256_update(&WeakCtx,source,entropy,length,data);
/* Feed one entropy sample into the strong generator (thin wrapper
 * around yarrow256_update on StrongCtx). */
415 void yarrowUpdateStrong(unsigned source, unsigned entropy, unsigned length, const quint8 *data){
416 yarrow256_update(&StrongCtx,source,entropy,length,data);
/* Fill `buffer` with random octets, using the strong generator once it
 * is seeded and falling back to the weak one otherwise.
 * NOTE(review): the `else` separating the two branches falls on a line
 * elided from this excerpt (original line 422) — as printed, both calls
 * would execute; confirm against the full source. */
419 void randomize(void* buffer, unsigned int length){
420 if(!yarrow256_is_seeded(&StrongCtx))
421 yarrow256_random(&WeakCtx,length,(quint8*)buffer);
423 yarrow256_random(&StrongCtx,length,(quint8*)buffer);
/* Fill `buffer` from the strong generator, one octet per call — each
 * yarrow256_random call generates a full AES block and keeps only one
 * octet.  Presumably a deliberate hardening choice — TODO confirm
 * intent.  The strong generator must already be seeded. */
426 void strongRandomize(void* buffer, unsigned int length){
427 Q_ASSERT(yarrow256_is_seeded(&StrongCtx));
428 for(uint i=0; i<length;i++)
429 yarrow256_random(&StrongCtx,1,(quint8*)buffer+i);
/* Reseed the strong generator from two caller-supplied entropy buffers.
 * NOTE(review): the branches/guards selecting between the update calls
 * below fall on elided lines, so the control flow is not visible in
 * this excerpt; the entropy/length arguments suggest different claims
 * (1 to 4 bits per octet) per branch — verify against the full source. */
432 void reseedStrongPool(quint8* buffer1,int l1,quint8* buffer2,int l2){
434 yarrow256_update(&StrongCtx,0,100,100,buffer1);
439 yarrow256_update(&StrongCtx,1,100,25,buffer2);
445 yarrow256_update(&StrongCtx,0,160,160,buffer1);
448 yarrow256_update(&StrongCtx,1,l1,l1,buffer1);
449 yarrow256_update(&StrongCtx,1,4*l2,l2,buffer2);
452 yarrow256_update(&StrongCtx,0,160,40,buffer2);
455 yarrow256_update(&StrongCtx,1,l2*4,l2,buffer2);
456 yarrow256_update(&StrongCtx,1,l1,l1,buffer1);