Don't retry downloading bad files in tests, to avoid random test failures

shortcutme 2019-12-21 03:05:19 +01:00
parent c6b07f1294
commit 17fb740c51
No known key found for this signature in database
GPG Key ID: 5B63BAE6CB9613AE
1 changed file with 23 additions and 20 deletions
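
Every call in this test module that previously read site_temp.download(blind_includes=True) now also passes retry_bad_files=False; per the commit message, this avoids random test failures, since with retries disabled a file that is intentionally left bad is not re-requested in the background while a test is still asserting on site_temp.bad_files. The retry_bad_files flag itself is handled inside Site.download(), which is outside this diff; the sketch below is only an illustration of how such a flag could gate the retry step. The helper names (downloadContent, retryBadFiles) are assumptions, not taken from the real code, and the gevent result object that the tests call .get(timeout=10) on is not modeled.

    # Minimal sketch, not ZeroNet's actual Site implementation.
    class SiteSketch:
        def __init__(self):
            self.bad_files = {}  # inner_path -> number of failed attempts

        def downloadContent(self, inner_path, blind_includes=False):
            # Stand-in for the real content.json / file download logic.
            return True

        def retryBadFiles(self):
            # Stand-in: re-request everything still listed in bad_files.
            for inner_path in list(self.bad_files):
                self.downloadContent(inner_path)

        def download(self, blind_includes=False, retry_bad_files=True):
            ok = self.downloadContent("content.json", blind_includes=blind_includes)
            if retry_bad_files:
                # This is the background retry the tests now opt out of:
                # with retry_bad_files=False a deliberately broken file stays
                # in bad_files, so assertions like len(site_temp.bad_files) == 1
                # cannot race against a re-download.
                self.retryBadFiles()
            return ok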


@@ -34,7 +34,7 @@ class TestSiteDownload:
site_temp.addPeer(file_server.ip, 1544)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.storage.isFile("content.json")
@@ -53,7 +53,7 @@ class TestSiteDownload:
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
-assert site_temp.download(blind_includes=True).get(timeout=10) # Wait for download
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert "streamFile" not in [req[1] for req in requests]
content = site_temp.storage.loadJson("content.json")
@@ -85,7 +85,7 @@ class TestSiteDownload:
site_temp.addPeer(file_server.ip, 1544)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.settings["optional_downloaded"] == 0
@@ -109,7 +109,7 @@ class TestSiteDownload:
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
-assert site_temp.download(blind_includes=True).get(timeout=10) # Wait for download
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert "streamFile" not in [req[1] for req in requests]
content = site_temp.storage.loadJson("content.json")
@@ -139,7 +139,7 @@ class TestSiteDownload:
# Download normally
site_temp.addPeer(file_server.ip, 1544)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
@@ -148,7 +148,7 @@ class TestSiteDownload:
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
# Add archived data
assert not "archived" in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
@@ -163,7 +163,7 @@ class TestSiteDownload:
assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
time.sleep(0.1)
-assert site_temp.download(blind_includes=True).get(timeout=10) # Wait for download
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
# The archived content should disappear from remote client
assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
@@ -187,7 +187,7 @@ class TestSiteDownload:
# Download normally
site_temp.addPeer(file_server.ip, 1544)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
@@ -212,7 +212,7 @@ class TestSiteDownload:
assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
time.sleep(0.1)
-assert site_temp.download(blind_includes=True).get(timeout=10) # Wait for download
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
# The archived content should disappear from remote client
assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
@@ -239,7 +239,7 @@ class TestSiteDownload:
site_temp.addPeer(file_server.ip, 1544)
# Download site
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
# Download optional data/optional.txt
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
@@ -304,7 +304,7 @@ class TestSiteDownload:
# Download normal files
site_temp.log.info("Start Downloading site")
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
# Download optional data/optional.txt
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
@@ -357,7 +357,8 @@ class TestSiteDownload:
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+assert len(site_temp.bad_files) == 1
# Update file
data_original = site.storage.open("data/data.json").read()
@@ -375,7 +376,8 @@ class TestSiteDownload:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
site.publish()
time.sleep(0.1)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+site.log.info("Downloading site")
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1
assert site_temp.storage.open("data/data.json").read() == data_new
@@ -410,7 +412,7 @@ class TestSiteDownload:
event_done = gevent.event.AsyncResult()
site.publish(diffs=diffs)
time.sleep(0.1)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []
assert site_temp.storage.open("data/data.json").read() == data_new
@@ -432,7 +434,8 @@ class TestSiteDownload:
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
# Update file
data_original = site.storage.open("data/data.json").read()
@@ -462,7 +465,7 @@ class TestSiteDownload:
assert site.storage.getSize("content.json") > 10 * 1024 # Make it a big content.json
site.publish(diffs=diffs)
time.sleep(0.1)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
assert len(file_requests) == 1
@@ -484,7 +487,7 @@ class TestSiteDownload:
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
site_temp.settings["size_limit"] = int(20 * 1024 *1024)
site_temp.saveSettings()
@@ -510,7 +513,7 @@ class TestSiteDownload:
assert site.storage.getSize("content.json") > 10 * 1024 * 1024 # verify it over 10MB
time.sleep(0.1)
site.publish(diffs=diffs)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
@@ -531,7 +534,7 @@ class TestSiteDownload:
site_temp.addPeer(file_server.ip, 1544)
-assert site_temp.download(blind_includes=True).get(timeout=10)
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
site.storage.write("data/img/árvíztűrő.png", b"test")
@@ -545,7 +548,7 @@ class TestSiteDownload:
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
-assert site_temp.download(blind_includes=True).get(timeout=10) # Wait for download
+assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1
content = site_temp.storage.loadJson("content.json")