Skip to content

Commit

Permalink
tests: verify that parallel downloads work
Browse files Browse the repository at this point in the history
  • Loading branch information
Wazzaps committed Apr 17, 2023
1 parent f3a4cf7 commit 068a0c2
Showing 1 changed file with 31 additions and 0 deletions.
31 changes: 31 additions & 0 deletions tests/test_cli.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import json
import gzip
import multiprocessing
import re
import subprocess
import tarfile
Expand Down Expand Up @@ -493,6 +494,7 @@ def test_stress(clean_repos, upload, list_, download):
print(f'Listed {COUNT} artifacts in {list_duration} seconds')
assert list_duration < 5, f"Listing {COUNT} artifacts took too long"


@pytest.mark.parametrize('is_compressed', ['compressed', 'uncompressed'])
def test_download_big_file(clean_repos, upload, download, big_file, is_compressed):
_ = clean_repos
Expand All @@ -503,3 +505,32 @@ def test_download_big_file(clean_repos, upload, download, big_file, is_compresse
)
artifact_path = download(f'foo:{artifact_hash}', {})
assert_files_identical(artifact_path, big_file)


@pytest.mark.parametrize('is_compressed', ['compressed', 'uncompressed'])
def test_parallel_download_single_file(clean_repos, upload, download, big_file, is_compressed):
    """Upload the same big file twice, then download both artifacts concurrently.

    Each download runs in its own process so the parallel-download path is
    exercised for real. The test fails if any child process exits non-zero
    or does not finish within the timeout.
    """
    _ = clean_repos

    artifact1_hash = upload(
        str(big_file),
        artifact_type='foo',
        compressed=is_compressed == 'compressed'
    )
    artifact2_hash = upload(
        str(big_file),
        artifact_type='foo',
        compressed=is_compressed == 'compressed'
    )

    def download_and_verify(artifact_hash):
        # Runs in the child process: fetch one artifact and verify contents.
        artifact_path = download(f'foo:{artifact_hash}', {})
        assert_files_identical(artifact_path, big_file)

    processes = []
    for artifact_hash in (artifact1_hash, artifact2_hash):
        # Bind the hash via args= instead of a closure over the loop
        # variable, so each process is tied to its own value regardless of
        # when it is scheduled (avoids late-binding surprises).
        # NOTE(review): a nested target requires the 'fork' start method;
        # presumably this suite runs on Linux — confirm if portability matters.
        p = multiprocessing.Process(target=download_and_verify, args=(artifact_hash,))
        processes.append(p)
        p.start()

    for p in processes:
        p.join(timeout=60)
        # A child's AssertionError does NOT propagate to the parent; without
        # this exitcode check the test would pass even when a download fails.
        # exitcode is None if the join timed out, non-zero on failure.
        assert p.exitcode == 0, f'parallel download failed (exitcode={p.exitcode})'

0 comments on commit 068a0c2

Please sign in to comment.