Skip to content

Commit 0ae70e9

Browse files
committed
improve test
1 parent 398ddb3 commit 0ae70e9

1 file changed

Lines changed: 16 additions & 17 deletions

File tree

Lib/test/test_hashlib.py

Lines changed: 16 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -1055,7 +1055,7 @@ def test_sha256_gil(self):
10551055
@threading_helper.reap_threads
10561056
@threading_helper.requires_working_threading()
10571057
def test_threaded_hashing_fast(self):
1058-
# Same as test_threaded_hashing_slow() but only tests "fast" functions
1058+
# Same as test_threaded_hashing_slow() but only tests some functions
10591059
# since otherwise test_hashlib.py becomes too slow during development.
10601060
for name in ['md5', 'sha1', 'sha256', 'sha3_256', 'blake2s']:
10611061
if constructor := getattr(hashlib, name, None):
@@ -1081,30 +1081,29 @@ def do_test_threaded_hashing(self, constructor, is_shake):
10811081
# If the internal locks are working to prevent multiple
10821082
# updates on the same object from running at once, the resulting
10831083
# hash will be the same as doing it single threaded upfront.
1084-
#
1085-
# Be careful when choosing num_threads, len(smallest_data)
1086-
# and len(data) // len(smallest_data) as the obtained chunk
1087-
# size needs to satisfy some conditions below.
1088-
num_threads = 5
1089-
smallest_data = os.urandom(8)
1090-
data = smallest_data * 200000
1084+
1085+
# The data to hash has length s|M|q^N and the chunk size for the i-th
1086+
# thread is s|M|q^(N-i), where N is the number of threads, M is a fixed
1087+
# message of small length, and s >= 1 and q >= 2 are small integers.
1088+
smallest_size, num_threads, s, q = 8, 5, 2, 10
1089+
1090+
smallest_data = os.urandom(smallest_size)
1091+
data = s * smallest_data * (q ** num_threads)
10911092

10921093
h1 = constructor(usedforsecurity=False)
10931094
h2 = constructor(data * num_threads, usedforsecurity=False)
10941095

1095-
def hash_in_chunks(chunk_size):
1096-
index = 0
1097-
while index < len(data):
1096+
def update(chunk_size):
1097+
for index in range(0, len(data), chunk_size):
10981098
h1.update(data[index:index + chunk_size])
1099-
index += chunk_size
11001099

11011100
threads = []
1102-
for threadnum in range(num_threads):
1103-
chunk_size = len(data) // (10 ** threadnum)
1101+
for thread_num in range(num_threads):
1102+
# chunk_size = len(data) // (q ** thread_num)
1103+
chunk_size = s * smallest_size * q ** (num_threads - thread_num)
11041104
self.assertGreater(chunk_size, 0)
1105-
self.assertEqual(chunk_size % len(smallest_data), 0)
1106-
thread = threading.Thread(target=hash_in_chunks,
1107-
args=(chunk_size,))
1105+
self.assertEqual(chunk_size % smallest_size, 0)
1106+
thread = threading.Thread(target=update, args=(chunk_size,))
11081107
threads.append(thread)
11091108

11101109
for thread in threads:

0 commit comments

Comments (0)