1 # Test hashlib module
2 #
3 # $Id$
4 #
5 # Copyright (C) 2005-2010 Gregory P. Smith (greg@krypto.org)
6 # Licensed to PSF under a Contributor Agreement.
7 #
8
9 import array
10 from binascii import unhexlify
11 import hashlib
12 import importlib
13 import io
14 import itertools
15 import os
16 import sys
17 import sysconfig
18 import threading
19 import unittest
20 import warnings
21 from test import support
22 from test.support import _4G, bigmemtest
23 from test.support.import_helper import import_fresh_module
24 from test.support import os_helper
25 from test.support import threading_helper
26 from test.support import warnings_helper
27 from http.client import HTTPException
28
# Were we compiled --with-pydebug or with #define Py_DEBUG?
COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')

# default builtin hash module
default_builtin_hashes = {'md5', 'sha1', 'sha256', 'sha512', 'sha3', 'blake2'}
# --with-builtin-hashlib-hashes override
builtin_hashes = sysconfig.get_config_var("PY_BUILTIN_HASHLIB_HASHES")
if builtin_hashes is None:
    builtin_hashes = default_builtin_hashes
else:
    # The config value is a quoted comma-separated list; normalize to a
    # lowercase set of module names.
    builtin_hashes = {
        m.strip() for m in builtin_hashes.strip('"').lower().split(",")
    }

# hashlib with and without OpenSSL backend for PBKDF2
# only import builtin_hashlib when all builtin hashes are available.
# Otherwise import prints noise on stderr
openssl_hashlib = import_fresh_module('hashlib', fresh=['_hashlib'])
if builtin_hashes == default_builtin_hashes:
    builtin_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
else:
    builtin_hashlib = None

try:
    from _hashlib import HASH, HASHXOF, openssl_md_meth_names, get_fips_mode
except ImportError:
    # No OpenSSL backend: provide inert fallbacks so the rest of the
    # module can test only the pure/builtin implementations.
    HASH = None
    HASHXOF = None
    openssl_md_meth_names = frozenset()

    def get_fips_mode():
        return 0

try:
    import _blake2
except ImportError:
    _blake2 = None

requires_blake2 = unittest.skipUnless(_blake2, 'requires _blake2')

# bpo-46913: Don't test the _sha3 extension on a Python UBSAN build
SKIP_SHA3 = support.check_sanitizer(ub=True)
requires_sha3 = unittest.skipUnless(not SKIP_SHA3, 'requires _sha3')
72
73
def hexstr(s):
    """Return the lowercase hex encoding of the bytes object *s*.

    Deliberately implemented by hand (not via bytes.hex()) so it can act
    as an independent cross-check for the hash objects' hexdigest().
    """
    assert isinstance(s, bytes), repr(s)
    digits = "0123456789abcdef"
    return ''.join(digits[(byte >> 4) & 0xF] + digits[byte & 0xF]
                   for byte in s)
81
82
URL = "http://www.pythontest.net/hashlib/{}.txt"

def read_vectors(hash_name):
    """Yield test vectors for *hash_name* fetched from pythontest.net.

    Each yielded item is the list of comma-separated fields from one
    line of the vector file, with the first field (the message) already
    converted from hex to bytes.  Raises unittest.SkipTest when the
    resource cannot be retrieved (offline test runs).
    """
    url = URL.format(hash_name)
    try:
        testdata = support.open_urlresource(url, encoding="utf-8")
    except (OSError, HTTPException):
        raise unittest.SkipTest("Could not retrieve {}".format(url))
    with testdata:
        for line in testdata:
            line = line.strip()
            if line.startswith('#') or not line:
                continue  # skip comments and blank lines
            parts = line.split(',')
            parts[0] = bytes.fromhex(parts[0])
            yield parts
99
100
101 class ESC[4;38;5;81mHashLibTestCase(ESC[4;38;5;149munittestESC[4;38;5;149m.ESC[4;38;5;149mTestCase):
    # Every spelling (lower- and upper-case) that hashlib.new() accepts.
    supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
                             'sha224', 'SHA224', 'sha256', 'SHA256',
                             'sha384', 'SHA384', 'sha512', 'SHA512',
                             'blake2b', 'blake2s',
                             'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
                             'shake_128', 'shake_256')

    # Variable-output-length algorithms; their digest()/hexdigest() take
    # an explicit length argument.
    shakes = {'shake_128', 'shake_256'}

    # Issue #14693: fallback modules are always compiled under POSIX
    _warn_on_extension_import = os.name == 'posix' or COMPILED_WITH_PYDEBUG
113
114 def _conditional_import_module(self, module_name):
115 """Import a module and return a reference to it or None on failure."""
116 try:
117 return importlib.import_module(module_name)
118 except ModuleNotFoundError as error:
119 if self._warn_on_extension_import and module_name in builtin_hashes:
120 warnings.warn('Did a C extension fail to compile? %s' % error)
121 return None
122
    def __init__(self, *args, **kwargs):
        """Build self.constructors_to_test: a mapping from each available
        algorithm name to the set of every way that algorithm can be
        constructed (hashlib attribute, hashlib.new, OpenSSL constructor,
        builtin-module constructor)."""
        algorithms = set()
        for algorithm in self.supported_hash_names:
            algorithms.add(algorithm.lower())

        _blake2 = self._conditional_import_module('_blake2')
        if _blake2:
            algorithms.update({'blake2b', 'blake2s'})

        self.constructors_to_test = {}
        for algorithm in algorithms:
            # sha3 is excluded entirely on UBSAN builds (bpo-46913).
            if SKIP_SHA3 and algorithm.startswith('sha3_'):
                continue
            self.constructors_to_test[algorithm] = set()

        # For each algorithm, test the direct constructor and the use
        # of hashlib.new given the algorithm name.
        for algorithm, constructors in self.constructors_to_test.items():
            constructors.add(getattr(hashlib, algorithm))
            # _alg=algorithm binds the loop variable eagerly so each
            # closure keeps its own algorithm name.
            def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs):
                if data is None:
                    return hashlib.new(_alg, **kwargs)
                return hashlib.new(_alg, data, **kwargs)
            constructors.add(_test_algorithm_via_hashlib_new)

        _hashlib = self._conditional_import_module('_hashlib')
        self._hashlib = _hashlib
        if _hashlib:
            # These two algorithms should always be present when this module
            # is compiled. If not, something was compiled wrong.
            self.assertTrue(hasattr(_hashlib, 'openssl_md5'))
            self.assertTrue(hasattr(_hashlib, 'openssl_sha1'))
            for algorithm, constructors in self.constructors_to_test.items():
                constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
                if constructor:
                    try:
                        constructor()
                    except ValueError:
                        # default constructor blocked by crypto policy
                        pass
                    else:
                        constructors.add(constructor)

        def add_builtin_constructor(name):
            # Reach through hashlib's private lookup to get the pure
            # builtin (non-OpenSSL) constructor for *name*.
            constructor = getattr(hashlib, "__get_builtin_constructor")(name)
            self.constructors_to_test[name].add(constructor)

        _md5 = self._conditional_import_module('_md5')
        if _md5:
            add_builtin_constructor('md5')
        _sha1 = self._conditional_import_module('_sha1')
        if _sha1:
            add_builtin_constructor('sha1')
        _sha256 = self._conditional_import_module('_sha256')
        if _sha256:
            add_builtin_constructor('sha224')
            add_builtin_constructor('sha256')
        _sha512 = self._conditional_import_module('_sha512')
        if _sha512:
            add_builtin_constructor('sha384')
            add_builtin_constructor('sha512')
        if _blake2:
            add_builtin_constructor('blake2s')
            add_builtin_constructor('blake2b')

        if not SKIP_SHA3:
            _sha3 = self._conditional_import_module('_sha3')
            if _sha3:
                add_builtin_constructor('sha3_224')
                add_builtin_constructor('sha3_256')
                add_builtin_constructor('sha3_384')
                add_builtin_constructor('sha3_512')
                add_builtin_constructor('shake_128')
                add_builtin_constructor('shake_256')

        super(HashLibTestCase, self).__init__(*args, **kwargs)
199
200 @property
201 def hash_constructors(self):
202 constructors = self.constructors_to_test.values()
203 return itertools.chain.from_iterable(constructors)
204
    @property
    def is_fips_mode(self):
        # Non-zero when OpenSSL runs in FIPS mode; the module-level
        # fallback get_fips_mode() (no OpenSSL) always reports 0.
        return get_fips_mode()
208
209 def test_hash_array(self):
210 a = array.array("b", range(10))
211 for cons in self.hash_constructors:
212 c = cons(a, usedforsecurity=False)
213 if c.name in self.shakes:
214 c.hexdigest(16)
215 else:
216 c.hexdigest()
217
218 def test_algorithms_guaranteed(self):
219 self.assertEqual(hashlib.algorithms_guaranteed,
220 set(_algo for _algo in self.supported_hash_names
221 if _algo.islower()))
222
223 def test_algorithms_available(self):
224 self.assertTrue(set(hashlib.algorithms_guaranteed).
225 issubset(hashlib.algorithms_available))
226 # all available algorithms must be loadable, bpo-47101
227 self.assertNotIn("undefined", hashlib.algorithms_available)
228 for name in hashlib.algorithms_available:
229 digest = hashlib.new(name, usedforsecurity=False)
230
231 def test_usedforsecurity_true(self):
232 hashlib.new("sha256", usedforsecurity=True)
233 if self.is_fips_mode:
234 self.skipTest("skip in FIPS mode")
235 for cons in self.hash_constructors:
236 cons(usedforsecurity=True)
237 cons(b'', usedforsecurity=True)
238 hashlib.new("md5", usedforsecurity=True)
239 hashlib.md5(usedforsecurity=True)
240 if self._hashlib is not None:
241 self._hashlib.new("md5", usedforsecurity=True)
242 self._hashlib.openssl_md5(usedforsecurity=True)
243
244 def test_usedforsecurity_false(self):
245 hashlib.new("sha256", usedforsecurity=False)
246 for cons in self.hash_constructors:
247 cons(usedforsecurity=False)
248 cons(b'', usedforsecurity=False)
249 hashlib.new("md5", usedforsecurity=False)
250 hashlib.md5(usedforsecurity=False)
251 if self._hashlib is not None:
252 self._hashlib.new("md5", usedforsecurity=False)
253 self._hashlib.openssl_md5(usedforsecurity=False)
254
255 def test_unknown_hash(self):
256 self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
257 self.assertRaises(TypeError, hashlib.new, 1)
258
259 def test_new_upper_to_lower(self):
260 self.assertEqual(hashlib.new("SHA256").name, "sha256")
261
    def test_get_builtin_constructor(self):
        """Exercise hashlib's private builtin-constructor lookup and its
        cache, including the path where the C module is unimportable."""
        get_builtin_constructor = getattr(hashlib,
                                          '__get_builtin_constructor')
        builtin_constructor_cache = getattr(hashlib,
                                            '__builtin_constructor_cache')
        self.assertRaises(ValueError, get_builtin_constructor, 'test')
        try:
            import _md5
        except ImportError:
            self.skipTest("_md5 module not available")
        # This forces an ImportError for "import _md5" statements
        sys.modules['_md5'] = None
        # clear the cache
        builtin_constructor_cache.clear()
        try:
            self.assertRaises(ValueError, get_builtin_constructor, 'md5')
        finally:
            # Restore the real module (or remove the None placeholder) so
            # later tests see a clean sys.modules.
            if '_md5' in locals():
                sys.modules['_md5'] = _md5
            else:
                del sys.modules['_md5']
        self.assertRaises(TypeError, get_builtin_constructor, 3)
        constructor = get_builtin_constructor('md5')
        self.assertIs(constructor, _md5.md5)
        # The lookup caches both the requested and canonical spellings.
        self.assertEqual(sorted(builtin_constructor_cache), ['MD5', 'md5'])
287
288 def test_hexdigest(self):
289 for cons in self.hash_constructors:
290 h = cons(usedforsecurity=False)
291 if h.name in self.shakes:
292 self.assertIsInstance(h.digest(16), bytes)
293 self.assertEqual(hexstr(h.digest(16)), h.hexdigest(16))
294 else:
295 self.assertIsInstance(h.digest(), bytes)
296 self.assertEqual(hexstr(h.digest()), h.hexdigest())
297
298 def test_digest_length_overflow(self):
299 # See issue #34922
300 large_sizes = (2**29, 2**32-10, 2**32+10, 2**61, 2**64-10, 2**64+10)
301 for cons in self.hash_constructors:
302 h = cons(usedforsecurity=False)
303 if h.name not in self.shakes:
304 continue
305 if HASH is not None and isinstance(h, HASH):
306 # _hashopenssl's take a size_t
307 continue
308 for digest in h.digest, h.hexdigest:
309 self.assertRaises(ValueError, digest, -10)
310 for length in large_sizes:
311 with self.assertRaises((ValueError, OverflowError)):
312 digest(length)
313
314 def test_name_attribute(self):
315 for cons in self.hash_constructors:
316 h = cons(usedforsecurity=False)
317 self.assertIsInstance(h.name, str)
318 if h.name in self.supported_hash_names:
319 self.assertIn(h.name, self.supported_hash_names)
320 else:
321 self.assertNotIn(h.name, self.supported_hash_names)
322 self.assertEqual(
323 h.name,
324 hashlib.new(h.name, usedforsecurity=False).name
325 )
326
327 def test_large_update(self):
328 aas = b'a' * 128
329 bees = b'b' * 127
330 cees = b'c' * 126
331 dees = b'd' * 2048 # HASHLIB_GIL_MINSIZE
332
333 for cons in self.hash_constructors:
334 m1 = cons(usedforsecurity=False)
335 m1.update(aas)
336 m1.update(bees)
337 m1.update(cees)
338 m1.update(dees)
339 if m1.name in self.shakes:
340 args = (16,)
341 else:
342 args = ()
343
344 m2 = cons(usedforsecurity=False)
345 m2.update(aas + bees + cees + dees)
346 self.assertEqual(m1.digest(*args), m2.digest(*args))
347
348 m3 = cons(aas + bees + cees + dees, usedforsecurity=False)
349 self.assertEqual(m1.digest(*args), m3.digest(*args))
350
351 # verify copy() doesn't touch original
352 m4 = cons(aas + bees + cees, usedforsecurity=False)
353 m4_digest = m4.digest(*args)
354 m4_copy = m4.copy()
355 m4_copy.update(dees)
356 self.assertEqual(m1.digest(*args), m4_copy.digest(*args))
357 self.assertEqual(m4.digest(*args), m4_digest)
358
    def check(self, name, data, hexdigest, shake=False, **kwargs):
        """Hash *data* with every constructor registered for *name* and
        verify both hexdigest() and digest() against *hexdigest*.

        *shake* selects the variable-length digest API (output length is
        derived from len(hexdigest)); extra keyword arguments are passed
        through to the constructors (blake2 parameters, usedforsecurity).
        """
        length = len(hexdigest)//2
        hexdigest = hexdigest.lower()
        constructors = self.constructors_to_test[name]
        # 2 is for hashlib.name(...) and hashlib.new(name, ...)
        self.assertGreaterEqual(len(constructors), 2)
        for hash_object_constructor in constructors:
            m = hash_object_constructor(data, **kwargs)
            computed = m.hexdigest() if not shake else m.hexdigest(length)
            self.assertEqual(
                    computed, hexdigest,
                    "Hash algorithm %s constructed using %s returned hexdigest"
                    " %r for %d byte input data that should have hashed to %r."
                    % (name, hash_object_constructor,
                       computed, len(data), hexdigest))
            computed = m.digest() if not shake else m.digest(length)
            digest = bytes.fromhex(hexdigest)
            self.assertEqual(computed, digest)
            if not shake:
                self.assertEqual(len(digest), m.digest_size)

        if not shake and kwargs.get("key") is None:
            # skip shake and blake2 extended parameter tests
            self.check_file_digest(name, data, hexdigest)
383
    def check_file_digest(self, name, data, hexdigest):
        """Verify hashlib.file_digest() against *hexdigest* for both a
        BytesIO buffer and a real on-disk file containing *data*."""
        hexdigest = hexdigest.lower()
        try:
            hashlib.new(name)
        except ValueError:
            # skip, algorithm is blocked by security policy.
            return
        # file_digest() accepts an algorithm name or a constructor.
        digests = [name]
        digests.extend(self.constructors_to_test[name])

        with open(os_helper.TESTFN, "wb") as f:
            f.write(data)

        try:
            for digest in digests:
                buf = io.BytesIO(data)
                buf.seek(0)
                self.assertEqual(
                    hashlib.file_digest(buf, digest).hexdigest(), hexdigest
                )
                with open(os_helper.TESTFN, "rb") as f:
                    digestobj = hashlib.file_digest(f, digest)
                self.assertEqual(digestobj.hexdigest(), hexdigest)
        finally:
            # always remove the scratch file, even on assertion failure
            os.unlink(os_helper.TESTFN)
409
410 def check_no_unicode(self, algorithm_name):
411 # Unicode objects are not allowed as input.
412 constructors = self.constructors_to_test[algorithm_name]
413 for hash_object_constructor in constructors:
414 self.assertRaises(TypeError, hash_object_constructor, 'spam')
415
416 def test_no_unicode(self):
417 self.check_no_unicode('md5')
418 self.check_no_unicode('sha1')
419 self.check_no_unicode('sha224')
420 self.check_no_unicode('sha256')
421 self.check_no_unicode('sha384')
422 self.check_no_unicode('sha512')
423
424 @requires_blake2
425 def test_no_unicode_blake2(self):
426 self.check_no_unicode('blake2b')
427 self.check_no_unicode('blake2s')
428
429 @requires_sha3
430 def test_no_unicode_sha3(self):
431 self.check_no_unicode('sha3_224')
432 self.check_no_unicode('sha3_256')
433 self.check_no_unicode('sha3_384')
434 self.check_no_unicode('sha3_512')
435 self.check_no_unicode('shake_128')
436 self.check_no_unicode('shake_256')
437
438 def check_blocksize_name(self, name, block_size=0, digest_size=0,
439 digest_length=None):
440 constructors = self.constructors_to_test[name]
441 for hash_object_constructor in constructors:
442 m = hash_object_constructor(usedforsecurity=False)
443 self.assertEqual(m.block_size, block_size)
444 self.assertEqual(m.digest_size, digest_size)
445 if digest_length:
446 self.assertEqual(len(m.digest(digest_length)),
447 digest_length)
448 self.assertEqual(len(m.hexdigest(digest_length)),
449 2*digest_length)
450 else:
451 self.assertEqual(len(m.digest()), digest_size)
452 self.assertEqual(len(m.hexdigest()), 2*digest_size)
453 self.assertEqual(m.name, name)
454 # split for sha3_512 / _sha3.sha3 object
455 self.assertIn(name.split("_")[0], repr(m))
456
457 def test_blocksize_name(self):
458 self.check_blocksize_name('md5', 64, 16)
459 self.check_blocksize_name('sha1', 64, 20)
460 self.check_blocksize_name('sha224', 64, 28)
461 self.check_blocksize_name('sha256', 64, 32)
462 self.check_blocksize_name('sha384', 128, 48)
463 self.check_blocksize_name('sha512', 128, 64)
464
465 @requires_sha3
466 def test_blocksize_name_sha3(self):
467 self.check_blocksize_name('sha3_224', 144, 28)
468 self.check_blocksize_name('sha3_256', 136, 32)
469 self.check_blocksize_name('sha3_384', 104, 48)
470 self.check_blocksize_name('sha3_512', 72, 64)
471 self.check_blocksize_name('shake_128', 168, 0, 32)
472 self.check_blocksize_name('shake_256', 136, 0, 64)
473
474 def check_sha3(self, name, capacity, rate, suffix):
475 constructors = self.constructors_to_test[name]
476 for hash_object_constructor in constructors:
477 m = hash_object_constructor()
478 if HASH is not None and isinstance(m, HASH):
479 # _hashopenssl's variant does not have extra SHA3 attributes
480 continue
481 self.assertEqual(capacity + rate, 1600)
482 self.assertEqual(m._capacity_bits, capacity)
483 self.assertEqual(m._rate_bits, rate)
484 self.assertEqual(m._suffix, suffix)
485
486 @requires_sha3
487 def test_extra_sha3(self):
488 self.check_sha3('sha3_224', 448, 1152, b'\x06')
489 self.check_sha3('sha3_256', 512, 1088, b'\x06')
490 self.check_sha3('sha3_384', 768, 832, b'\x06')
491 self.check_sha3('sha3_512', 1024, 576, b'\x06')
492 self.check_sha3('shake_128', 256, 1344, b'\x1f')
493 self.check_sha3('shake_256', 512, 1088, b'\x1f')
494
495 @requires_blake2
496 def test_blocksize_name_blake2(self):
497 self.check_blocksize_name('blake2b', 128, 64)
498 self.check_blocksize_name('blake2s', 64, 32)
499
    # MD5 vectors from the RFC 1321 appendix A.5 test suite.
    def test_case_md5_0(self):
        self.check(
            'md5', b'', 'd41d8cd98f00b204e9800998ecf8427e',
            usedforsecurity=False
        )

    def test_case_md5_1(self):
        self.check(
            'md5', b'abc', '900150983cd24fb0d6963f7d28e17f72',
            usedforsecurity=False
        )

    def test_case_md5_2(self):
        self.check(
            'md5',
            b'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
            'd174ab98d277d9f5a5611c2c9f419d9f',
            usedforsecurity=False
        )

    # Inputs around and above 4 GiB guard against 32-bit length overflow.
    @unittest.skipIf(sys.maxsize < _4G + 5, 'test cannot run on 32-bit systems')
    @bigmemtest(size=_4G + 5, memuse=1, dry_run=False)
    def test_case_md5_huge(self, size):
        self.check('md5', b'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')

    @unittest.skipIf(sys.maxsize < _4G - 1, 'test cannot run on 32-bit systems')
    @bigmemtest(size=_4G - 1, memuse=1, dry_run=False)
    def test_case_md5_uintmax(self, size):
        self.check('md5', b'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
529
    # use the three examples from Federal Information Processing Standards
    # Publication 180-1, Secure Hash Standard, 1995 April 17
    # http://www.itl.nist.gov/div897/pubs/fip180-1.htm

    def test_case_sha1_0(self):
        self.check('sha1', b"",
                   "da39a3ee5e6b4b0d3255bfef95601890afd80709")

    def test_case_sha1_1(self):
        self.check('sha1', b"abc",
                   "a9993e364706816aba3e25717850c26c9cd0d89d")

    def test_case_sha1_2(self):
        self.check('sha1',
                   b"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
                   "84983e441c3bd26ebaae4aa1f95129e5e54670f1")

    def test_case_sha1_3(self):
        # one million 'a' bytes (FIPS 180-1 long-message example)
        self.check('sha1', b"a" * 1000000,
                   "34aa973cd4c4daa4f61eeb2bdbad27316534016f")
550
551
    # use the examples from Federal Information Processing Standards
    # Publication 180-2, Secure Hash Standard, 2002 August 1
    # http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf

    def test_case_sha224_0(self):
        self.check('sha224', b"",
          "d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f")

    def test_case_sha224_1(self):
        self.check('sha224', b"abc",
          "23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7")

    def test_case_sha224_2(self):
        self.check('sha224',
          b"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
          "75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525")

    def test_case_sha224_3(self):
        # one million 'a' bytes (FIPS 180-2 long-message example)
        self.check('sha224', b"a" * 1000000,
          "20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67")
572
573
    # SHA-256 vectors from FIPS 180-2.
    def test_case_sha256_0(self):
        self.check('sha256', b"",
          "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")

    def test_case_sha256_1(self):
        self.check('sha256', b"abc",
          "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")

    def test_case_sha256_2(self):
        self.check('sha256',
          b"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
          "248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1")

    def test_case_sha256_3(self):
        # one million 'a' bytes (FIPS 180-2 long-message example)
        self.check('sha256', b"a" * 1000000,
          "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0")
590
591
    # SHA-384 vectors from FIPS 180-2.
    def test_case_sha384_0(self):
        self.check('sha384', b"",
          "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da"+
          "274edebfe76f65fbd51ad2f14898b95b")

    def test_case_sha384_1(self):
        self.check('sha384', b"abc",
          "cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed"+
          "8086072ba1e7cc2358baeca134c825a7")

    def test_case_sha384_2(self):
        self.check('sha384',
          b"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
          b"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
          "09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712"+
          "fcc7c71a557e2db966c3e9fa91746039")

    def test_case_sha384_3(self):
        # one million 'a' bytes (FIPS 180-2 long-message example)
        self.check('sha384', b"a" * 1000000,
          "9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b"+
          "07b8b3dc38ecc4ebae97ddd87f3d8985")
613
614
    # SHA-512 vectors from FIPS 180-2.
    def test_case_sha512_0(self):
        self.check('sha512', b"",
          "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"+
          "47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")

    def test_case_sha512_1(self):
        self.check('sha512', b"abc",
          "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a"+
          "2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")

    def test_case_sha512_2(self):
        self.check('sha512',
          b"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
          b"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
          "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"+
          "501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909")

    def test_case_sha512_3(self):
        # one million 'a' bytes (FIPS 180-2 long-message example)
        self.check('sha512', b"a" * 1000000,
          "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
          "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
636
    def check_blake2(self, constructor, salt_size, person_size, key_size,
                     digest_size, max_offset):
        """Exercise every blake2 keyword parameter at, below and above
        its documented limit for the given constructor."""
        self.assertEqual(constructor.SALT_SIZE, salt_size)
        for i in range(salt_size + 1):
            constructor(salt=b'a' * i)
        salt = b'a' * (salt_size + 1)
        self.assertRaises(ValueError, constructor, salt=salt)

        self.assertEqual(constructor.PERSON_SIZE, person_size)
        for i in range(person_size+1):
            constructor(person=b'a' * i)
        person = b'a' * (person_size + 1)
        self.assertRaises(ValueError, constructor, person=person)

        self.assertEqual(constructor.MAX_DIGEST_SIZE, digest_size)
        for i in range(1, digest_size + 1):
            constructor(digest_size=i)
        self.assertRaises(ValueError, constructor, digest_size=-1)
        self.assertRaises(ValueError, constructor, digest_size=0)
        self.assertRaises(ValueError, constructor, digest_size=digest_size+1)

        self.assertEqual(constructor.MAX_KEY_SIZE, key_size)
        for i in range(key_size+1):
            constructor(key=b'a' * i)
        key = b'a' * (key_size + 1)
        self.assertRaises(ValueError, constructor, key=key)
        # an empty key must hash identically to no key at all
        self.assertEqual(constructor().hexdigest(),
                         constructor(key=b'').hexdigest())

        # tree-hashing parameters: fanout is 0..255
        for i in range(0, 256):
            constructor(fanout=i)
        self.assertRaises(ValueError, constructor, fanout=-1)
        self.assertRaises(ValueError, constructor, fanout=256)

        # depth is 1..255 (0 is invalid)
        for i in range(1, 256):
            constructor(depth=i)
        self.assertRaises(ValueError, constructor, depth=-1)
        self.assertRaises(ValueError, constructor, depth=0)
        self.assertRaises(ValueError, constructor, depth=256)

        for i in range(0, 256):
            constructor(node_depth=i)
        self.assertRaises(ValueError, constructor, node_depth=-1)
        self.assertRaises(ValueError, constructor, node_depth=256)

        for i in range(0, digest_size + 1):
            constructor(inner_size=i)
        self.assertRaises(ValueError, constructor, inner_size=-1)
        self.assertRaises(ValueError, constructor, inner_size=digest_size+1)

        # leaf_size is an unsigned 32-bit quantity
        constructor(leaf_size=0)
        constructor(leaf_size=(1<<32)-1)
        self.assertRaises(ValueError, constructor, leaf_size=-1)
        self.assertRaises(OverflowError, constructor, leaf_size=1<<32)

        constructor(node_offset=0)
        constructor(node_offset=max_offset)
        self.assertRaises(ValueError, constructor, node_offset=-1)
        self.assertRaises(OverflowError, constructor, node_offset=max_offset+1)

        # the message must be passed positionally, never by keyword
        self.assertRaises(TypeError, constructor, data=b'')
        self.assertRaises(TypeError, constructor, string=b'')
        self.assertRaises(TypeError, constructor, '')

        # finally, all keyword parameters combined in one call
        constructor(
            b'',
            key=b'',
            salt=b'',
            person=b'',
            digest_size=17,
            fanout=1,
            depth=1,
            leaf_size=256,
            node_offset=512,
            node_depth=1,
            inner_size=7,
            last_node=True
        )
715
716 def blake2_rfc7693(self, constructor, md_len, in_len):
717 def selftest_seq(length, seed):
718 mask = (1<<32)-1
719 a = (0xDEAD4BAD * seed) & mask
720 b = 1
721 out = bytearray(length)
722 for i in range(length):
723 t = (a + b) & mask
724 a, b = b, t
725 out[i] = (t >> 24) & 0xFF
726 return out
727 outer = constructor(digest_size=32)
728 for outlen in md_len:
729 for inlen in in_len:
730 indata = selftest_seq(inlen, inlen)
731 key = selftest_seq(outlen, outlen)
732 unkeyed = constructor(indata, digest_size=outlen)
733 outer.update(unkeyed.digest())
734 keyed = constructor(indata, key=key, digest_size=outlen)
735 outer.update(keyed.digest())
736 return outer.hexdigest()
737
    @requires_blake2
    def test_blake2b(self):
        """blake2b parameter limits plus the RFC 7693 self-test digest."""
        self.check_blake2(hashlib.blake2b, 16, 16, 64, 64, (1<<64)-1)
        b2b_md_len = [20, 32, 48, 64]
        b2b_in_len = [0, 3, 128, 129, 255, 1024]
        self.assertEqual(
            self.blake2_rfc7693(hashlib.blake2b, b2b_md_len, b2b_in_len),
            "c23a7800d98123bd10f506c61e29da5603d763b8bbad2e737f5e765a7bccd475")

    @requires_blake2
    def test_case_blake2b_0(self):
        self.check('blake2b', b"",
          "786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419"+
          "d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce")

    @requires_blake2
    def test_case_blake2b_1(self):
        self.check('blake2b', b"abc",
          "ba80a53f981c4d0d6a2797b69f12f6e94c212f14685ac4b74b12bb6fdbffa2d1"+
          "7d87c5392aab792dc252d5de4533cc9518d38aa8dbf1925ab92386edd4009923")

    @requires_blake2
    def test_case_blake2b_all_parameters(self):
        # This checks that all the parameters work in general, and also that
        # parameter byte order doesn't get confused on big endian platforms.
        self.check('blake2b', b"foo",
          "920568b0c5873b2f0ab67bedb6cf1b2b",
          digest_size=16,
          key=b"bar",
          salt=b"baz",
          person=b"bing",
          fanout=2,
          depth=3,
          leaf_size=4,
          node_offset=5,
          node_depth=6,
          inner_size=7,
          last_node=True)

    @requires_blake2
    def test_blake2b_vectors(self):
        # keyed vectors downloaded from pythontest.net
        for msg, key, md in read_vectors('blake2b'):
            key = bytes.fromhex(key)
            self.check('blake2b', msg, md, key=key)

    @requires_blake2
    def test_blake2s(self):
        """blake2s parameter limits plus the RFC 7693 self-test digest."""
        self.check_blake2(hashlib.blake2s, 8, 8, 32, 32, (1<<48)-1)
        b2s_md_len = [16, 20, 28, 32]
        b2s_in_len = [0, 3, 64, 65, 255, 1024]
        self.assertEqual(
            self.blake2_rfc7693(hashlib.blake2s, b2s_md_len, b2s_in_len),
            "6a411f08ce25adcdfb02aba641451cec53c598b24f4fc787fbdc88797f4c1dfe")

    @requires_blake2
    def test_case_blake2s_0(self):
        self.check('blake2s', b"",
          "69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9")

    @requires_blake2
    def test_case_blake2s_1(self):
        self.check('blake2s', b"abc",
          "508c5e8c327c14e2e1a72ba34eeb452f37458b209ed63a294d999b4c86675982")

    @requires_blake2
    def test_case_blake2s_all_parameters(self):
        # This checks that all the parameters work in general, and also that
        # parameter byte order doesn't get confused on big endian platforms.
        self.check('blake2s', b"foo",
          "bf2a8f7fe3c555012a6f8046e646bc75",
          digest_size=16,
          key=b"bar",
          salt=b"baz",
          person=b"bing",
          fanout=2,
          depth=3,
          leaf_size=4,
          node_offset=5,
          node_depth=6,
          inner_size=7,
          last_node=True)

    @requires_blake2
    def test_blake2s_vectors(self):
        # keyed vectors downloaded from pythontest.net
        for msg, key, md in read_vectors('blake2s'):
            key = bytes.fromhex(key)
            self.check('blake2s', msg, md, key=key)
825
    # SHA-3/SHAKE empty-message digests from FIPS 202, plus downloaded
    # vector files for longer messages.
    @requires_sha3
    def test_case_sha3_224_0(self):
        self.check('sha3_224', b"",
          "6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7")

    @requires_sha3
    def test_case_sha3_224_vector(self):
        for msg, md in read_vectors('sha3_224'):
            self.check('sha3_224', msg, md)

    @requires_sha3
    def test_case_sha3_256_0(self):
        self.check('sha3_256', b"",
          "a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a")

    @requires_sha3
    def test_case_sha3_256_vector(self):
        for msg, md in read_vectors('sha3_256'):
            self.check('sha3_256', msg, md)

    @requires_sha3
    def test_case_sha3_384_0(self):
        self.check('sha3_384', b"",
          "0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2a"+
          "c3713831264adb47fb6bd1e058d5f004")

    @requires_sha3
    def test_case_sha3_384_vector(self):
        for msg, md in read_vectors('sha3_384'):
            self.check('sha3_384', msg, md)

    @requires_sha3
    def test_case_sha3_512_0(self):
        self.check('sha3_512', b"",
          "a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a6"+
          "15b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26")

    @requires_sha3
    def test_case_sha3_512_vector(self):
        for msg, md in read_vectors('sha3_512'):
            self.check('sha3_512', msg, md)

    def test_case_shake_128_0(self):
        # shake=True: the output length is derived from the hex string
        self.check('shake_128', b"",
          "7f9c2ba4e88f827d616045507605853ed73b8093f6efbc88eb1a6eacfa66ef26",
          True)
        self.check('shake_128', b"", "7f9c", True)

    def test_case_shake128_vector(self):
        for msg, md in read_vectors('shake_128'):
            self.check('shake_128', msg, md, True)

    def test_case_shake_256_0(self):
        self.check('shake_256', b"",
          "46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f",
          True)
        self.check('shake_256', b"", "46b9", True)

    def test_case_shake256_vector(self):
        for msg, md in read_vectors('shake_256'):
            self.check('shake_256', msg, md, True)
887
888 def test_gil(self):
889 # Check things work fine with an input larger than the size required
890 # for multithreaded operation (which is hardwired to 2048).
891 gil_minsize = 2048
892
893 for cons in self.hash_constructors:
894 m = cons(usedforsecurity=False)
895 m.update(b'1')
896 m.update(b'#' * gil_minsize)
897 m.update(b'1')
898
899 m = cons(b'x' * gil_minsize, usedforsecurity=False)
900 m.update(b'1')
901
902 m = hashlib.sha256()
903 m.update(b'1')
904 m.update(b'#' * gil_minsize)
905 m.update(b'1')
906 self.assertEqual(
907 m.hexdigest(),
908 '1cfceca95989f51f658e3f3ffe7f1cd43726c9e088c13ee10b46f57cef135b94'
909 )
910
911 m = hashlib.sha256(b'1' + b'#' * gil_minsize + b'1')
912 self.assertEqual(
913 m.hexdigest(),
914 '1cfceca95989f51f658e3f3ffe7f1cd43726c9e088c13ee10b46f57cef135b94'
915 )
916
    @threading_helper.reap_threads
    @threading_helper.requires_working_threading()
    def test_threaded_hashing(self):
        # Updating the same hash object from several threads at once
        # using data chunk sizes containing the same byte sequences.
        #
        # If the internal locks are working to prevent multiple
        # updates on the same object from running at once, the resulting
        # hash will be the same as doing it single threaded upfront.
        hasher = hashlib.sha1()
        num_threads = 5
        smallest_data = b'swineflu'
        data = smallest_data * 200000
        expected_hash = hashlib.sha1(data*num_threads).hexdigest()

        def hash_in_chunks(chunk_size):
            # Feed all of `data` to the shared hasher in fixed-size pieces.
            index = 0
            while index < len(data):
                hasher.update(data[index:index + chunk_size])
                index += chunk_size

        threads = []
        for threadnum in range(num_threads):
            # Each thread uses a different chunk size, but every chunk is a
            # whole multiple of the repeating unit, so the final digest is
            # order-independent.
            chunk_size = len(data) // (10 ** threadnum)
            self.assertGreater(chunk_size, 0)
            self.assertEqual(chunk_size % len(smallest_data), 0)
            thread = threading.Thread(target=hash_in_chunks,
                                      args=(chunk_size,))
            threads.append(thread)

        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()

        self.assertEqual(expected_hash, hasher.hexdigest())
953
954 def test_get_fips_mode(self):
955 fips_mode = self.is_fips_mode
956 if fips_mode is not None:
957 self.assertIsInstance(fips_mode, int)
958
959 @support.cpython_only
960 def test_disallow_instantiation(self):
961 for algorithm, constructors in self.constructors_to_test.items():
962 if algorithm.startswith(("sha3_", "shake", "blake")):
963 # _sha3 and _blake types can be instantiated
964 continue
965 # all other types have DISALLOW_INSTANTIATION
966 for constructor in constructors:
967 # In FIPS mode some algorithms are not available raising ValueError
968 try:
969 h = constructor()
970 except ValueError:
971 continue
972 with self.subTest(constructor=constructor):
973 support.check_disallow_instantiation(self, type(h))
974
975 @unittest.skipUnless(HASH is not None, 'need _hashlib')
976 def test_hash_disallow_instantiation(self):
977 # internal types like _hashlib.HASH are not constructable
978 support.check_disallow_instantiation(self, HASH)
979 support.check_disallow_instantiation(self, HASHXOF)
980
981 def test_readonly_types(self):
982 for algorithm, constructors in self.constructors_to_test.items():
983 # all other types have DISALLOW_INSTANTIATION
984 for constructor in constructors:
985 # In FIPS mode some algorithms are not available raising ValueError
986 try:
987 hash_type = type(constructor())
988 except ValueError:
989 continue
990 with self.subTest(hash_type=hash_type):
991 with self.assertRaisesRegex(TypeError, "immutable type"):
992 hash_type.value = False
993
994
class KDFTests(unittest.TestCase):
    """Tests for the key derivation functions hashlib.pbkdf2_hmac and
    hashlib.scrypt, plus a few small hashlib module-level checks."""

    # (password, salt, iterations, dklen) inputs.  dklen None/-1 exercise
    # the "use the digest's natural length" paths; expected outputs live
    # in pbkdf2_results, indexed per digest in the same order.
    pbkdf2_test_vectors = [
        (b'password', b'salt', 1, None),
        (b'password', b'salt', 2, None),
        (b'password', b'salt', 4096, None),
        # too slow, it takes over a minute on a fast CPU.
        #(b'password', b'salt', 16777216, None),
        (b'passwordPASSWORDpassword', b'saltSALTsaltSALTsaltSALTsaltSALTsalt',
         4096, -1),
        (b'pass\0word', b'sa\0lt', 4096, 16),
    ]

    # (password, salt, n, r, p, expected_key) — presumably the official
    # scrypt vectors from RFC 7914; verify against the RFC if changing.
    scrypt_test_vectors = [
        (b'', b'', 16, 1, 1, unhexlify('77d6576238657b203b19ca42c18a0497f16b4844e3074ae8dfdffa3fede21442fcd0069ded0948f8326a753a0fc81f17e8d3e0fb2e0d3628cf35e20c38d18906')),
        (b'password', b'NaCl', 1024, 8, 16, unhexlify('fdbabe1c9d3472007856e7190d01e9fe7c6ad7cbc8237830e77376634b3731622eaf30d92e22a3886ff109279d9830dac727afb94a83ee6d8360cbdfa2cc0640')),
        (b'pleaseletmein', b'SodiumChloride', 16384, 8, 1, unhexlify('7023bdcb3afd7348461c06cd81fd38ebfda8fbba904f8e3ea9b543f6545da1f2d5432955613f0fcf62d49705242a9af9e61e85dc0d651e40dfcf017b45575887')),
    ]

    # Expected PBKDF2 outputs keyed by digest name.  Each entry is
    # (expected_bytes, overwrite_dklen): a non-None second element
    # replaces the dklen from the matching pbkdf2_test_vectors entry.
    pbkdf2_results = {
        "sha1": [
            # official test vectors from RFC 6070
            (bytes.fromhex('0c60c80f961f0e71f3a9b524af6012062fe037a6'), None),
            (bytes.fromhex('ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957'), None),
            (bytes.fromhex('4b007901b765489abead49d926f721d065a429c1'), None),
            #(bytes.fromhex('eefe3d61cd4da4e4e9945b3d6ba2158c2634e984'), None),
            (bytes.fromhex('3d2eec4fe41c849b80c8d83662c0e44a8b291a964c'
                           'f2f07038'), 25),
            (bytes.fromhex('56fa6aa75548099dcc37d7f03425e0c3'), None),],
        "sha256": [
            (bytes.fromhex('120fb6cffcf8b32c43e7225256c4f837'
                           'a86548c92ccc35480805987cb70be17b'), None),
            (bytes.fromhex('ae4d0c95af6b46d32d0adff928f06dd0'
                           '2a303f8ef3c251dfd6e2d85a95474c43'), None),
            (bytes.fromhex('c5e478d59288c841aa530db6845c4c8d'
                           '962893a001ce4e11a4963873aa98134a'), None),
            #(bytes.fromhex('cf81c66fe8cfc04d1f31ecb65dab4089'
            #               'f7f179e89b3b0bcb17ad10e3ac6eba46'), None),
            (bytes.fromhex('348c89dbcbd32b2f32d814b8116e84cf2b17'
                           '347ebc1800181c4e2a1fb8dd53e1c635518c7dac47e9'), 40),
            (bytes.fromhex('89b69d0516f829893c696226650a8687'), None),],
        "sha512": [
            (bytes.fromhex('867f70cf1ade02cff3752599a3a53dc4af34c7a669815ae5'
                           'd513554e1c8cf252c02d470a285a0501bad999bfe943c08f'
                           '050235d7d68b1da55e63f73b60a57fce'), None),
            (bytes.fromhex('e1d9c16aa681708a45f5c7c4e215ceb66e011a2e9f004071'
                           '3f18aefdb866d53cf76cab2868a39b9f7840edce4fef5a82'
                           'be67335c77a6068e04112754f27ccf4e'), None),
            (bytes.fromhex('d197b1b33db0143e018b12f3d1d1479e6cdebdcc97c5c0f8'
                           '7f6902e072f457b5143f30602641b3d55cd335988cb36b84'
                           '376060ecd532e039b742a239434af2d5'), None),
            (bytes.fromhex('8c0511f4c6e597c6ac6315d8f0362e225f3c501495ba23b8'
                           '68c005174dc4ee71115b59f9e60cd9532fa33e0f75aefe30'
                           '225c583a186cd82bd4daea9724a3d3b8'), 64),
            (bytes.fromhex('9d9e9c4cd21fe4be24d5b8244c759665'), None),],
    }

    def _test_pbkdf2_hmac(self, pbkdf2, supported):
        """Shared driver: check a pbkdf2 callable against the test vectors.

        `pbkdf2` is either the pure-Python or the OpenSSL-backed
        implementation; `supported` is the set of digest names that
        backend provides.  Also checks bytes-like salt/password
        handling and the error paths.
        """
        for digest_name, results in self.pbkdf2_results.items():
            if digest_name not in supported:
                continue
            for i, vector in enumerate(self.pbkdf2_test_vectors):
                password, salt, rounds, dklen = vector
                expected, overwrite_dklen = results[i]
                if overwrite_dklen:
                    dklen = overwrite_dklen
                out = pbkdf2(digest_name, password, salt, rounds, dklen)
                self.assertEqual(out, expected,
                                 (digest_name, password, salt, rounds, dklen))
                # password/salt may be any bytes-like object, not just bytes.
                out = pbkdf2(digest_name, memoryview(password),
                             memoryview(salt), rounds, dklen)
                self.assertEqual(out, expected)
                out = pbkdf2(digest_name, bytearray(password),
                             bytearray(salt), rounds, dklen)
                self.assertEqual(out, expected)
                if dklen is None:
                    # Omitting dklen must default to the digest size.
                    out = pbkdf2(digest_name, password, salt, rounds)
                    self.assertEqual(out, expected,
                                     (digest_name, password, salt, rounds))

        with self.assertRaisesRegex(ValueError, '.*unsupported.*'):
            pbkdf2('unknown', b'pass', b'salt', 1)

        if 'sha1' in supported:
            # Argument validation: wrong types and out-of-range values.
            self.assertRaises(
                TypeError, pbkdf2, b'sha1', b'pass', b'salt', 1
            )
            self.assertRaises(
                TypeError, pbkdf2, 'sha1', 'pass', 'salt', 1
            )
            self.assertRaises(
                ValueError, pbkdf2, 'sha1', b'pass', b'salt', 0
            )
            self.assertRaises(
                ValueError, pbkdf2, 'sha1', b'pass', b'salt', -1
            )
            self.assertRaises(
                ValueError, pbkdf2, 'sha1', b'pass', b'salt', 1, 0
            )
            self.assertRaises(
                ValueError, pbkdf2, 'sha1', b'pass', b'salt', 1, -1
            )
            # All parameters accepted as keywords.
            out = pbkdf2(hash_name='sha1', password=b'password', salt=b'salt',
                         iterations=1, dklen=None)
            self.assertEqual(out, self.pbkdf2_results['sha1'][0][0])

    @unittest.skipIf(builtin_hashlib is None, "test requires builtin_hashlib")
    def test_pbkdf2_hmac_py(self):
        # Pure-Python fallback implementation (OpenSSL blocked at import).
        with warnings_helper.check_warnings():
            self._test_pbkdf2_hmac(
                builtin_hashlib.pbkdf2_hmac, builtin_hashes
            )

    @unittest.skipUnless(hasattr(openssl_hashlib, 'pbkdf2_hmac'),
                         ' test requires OpenSSL > 1.0')
    def test_pbkdf2_hmac_c(self):
        # OpenSSL-backed C implementation.
        self._test_pbkdf2_hmac(openssl_hashlib.pbkdf2_hmac, openssl_md_meth_names)

    @unittest.skipUnless(hasattr(hashlib, 'scrypt'),
                         ' test requires OpenSSL > 1.1')
    @unittest.skipIf(get_fips_mode(), reason="scrypt is blocked in FIPS mode")
    def test_scrypt(self):
        """Check hashlib.scrypt vectors and its argument validation."""
        for password, salt, n, r, p, expected in self.scrypt_test_vectors:
            result = hashlib.scrypt(password, salt=salt, n=n, r=r, p=p)
            self.assertEqual(result, expected)

        # these values should work
        hashlib.scrypt(b'password', salt=b'salt', n=2, r=8, p=1)
        # password and salt must be bytes-like
        with self.assertRaises(TypeError):
            hashlib.scrypt('password', salt=b'salt', n=2, r=8, p=1)
        with self.assertRaises(TypeError):
            hashlib.scrypt(b'password', salt='salt', n=2, r=8, p=1)
        # require keyword args
        with self.assertRaises(TypeError):
            hashlib.scrypt(b'password')
        with self.assertRaises(TypeError):
            hashlib.scrypt(b'password', b'salt')
        with self.assertRaises(TypeError):
            hashlib.scrypt(b'password', 2, 8, 1, salt=b'salt')
        # n must be a power of two > 1; r and p must be positive.
        for n in [-1, 0, 1, None]:
            with self.assertRaises((ValueError, OverflowError, TypeError)):
                hashlib.scrypt(b'password', salt=b'salt', n=n, r=8, p=1)
        for r in [-1, 0, None]:
            with self.assertRaises((ValueError, OverflowError, TypeError)):
                hashlib.scrypt(b'password', salt=b'salt', n=2, r=r, p=1)
        for p in [-1, 0, None]:
            with self.assertRaises((ValueError, OverflowError, TypeError)):
                hashlib.scrypt(b'password', salt=b'salt', n=2, r=8, p=p)
        for maxmem in [-1, None]:
            with self.assertRaises((ValueError, OverflowError, TypeError)):
                hashlib.scrypt(b'password', salt=b'salt', n=2, r=8, p=1,
                               maxmem=maxmem)
        for dklen in [-1, None]:
            with self.assertRaises((ValueError, OverflowError, TypeError)):
                hashlib.scrypt(b'password', salt=b'salt', n=2, r=8, p=1,
                               dklen=dklen)

    def test_normalized_name(self):
        # OpenSSL alias spellings must not leak into algorithms_available.
        self.assertNotIn("blake2b512", hashlib.algorithms_available)
        self.assertNotIn("sha3-512", hashlib.algorithms_available)

    def test_file_digest(self):
        """hashlib.file_digest must match incremental hashing of the same data
        and reject non-file, text-mode, and write-mode arguments."""
        data = b'a' * 65536
        d1 = hashlib.sha256()
        self.addCleanup(os.unlink, os_helper.TESTFN)
        with open(os_helper.TESTFN, "wb") as f:
            for _ in range(10):
                d1.update(data)
                f.write(data)

        with open(os_helper.TESTFN, "rb") as f:
            d2 = hashlib.file_digest(f, hashlib.sha256)

        self.assertEqual(d1.hexdigest(), d2.hexdigest())
        self.assertEqual(d1.name, d2.name)
        self.assertIs(type(d1), type(d2))

        with self.assertRaises(ValueError):
            hashlib.file_digest(None, "sha256")

        # Text-mode files are rejected: file_digest needs raw bytes.
        with self.assertRaises(ValueError):
            with open(os_helper.TESTFN, "r") as f:
                hashlib.file_digest(f, "sha256")

        # Write-only files are rejected too.
        with self.assertRaises(ValueError):
            with open(os_helper.TESTFN, "wb") as f:
                hashlib.file_digest(f, "sha256")
1183
1184
# Run the whole test suite when executed directly.
if __name__ == "__main__":
    unittest.main()