ZeroNet/src/Test/TestSiteDownload.py


import time
import pytest
import mock
import gevent
import gevent.event
import os
from Connection import ConnectionServer
from Config import config
from File import FileRequest
from File import FileServer
from Site.Site import Site
from . import Spy
@pytest.mark.usefixtures("resetTempSettings")
@pytest.mark.usefixtures("resetSettings")
class TestSiteDownload:
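# Shared harness for every test below: the "site" fixture is served by
# file_server on port 1544, a second server on port 1545 acts as the
# downloading client for site_temp, and announce() is mocked out so no
# real peers are contacted. To run only this file (assuming ZeroNet's
# usual pytest setup): python -m pytest src/Test/TestSiteDownload.py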
def testRename(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.storage.isFile("content.json")
# Rename non-optional file
os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png"))
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
content = site.storage.loadJson("content.json")
assert "data/img/domain-new.png" in content["files"]
assert "data/img/domain.png" not in content["files"]
assert not site_temp.storage.isFile("data/img/domain-new.png")
assert site_temp.storage.isFile("data/img/domain.png")
settings_before = site_temp.settings
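# Spy.Spy wraps FileRequest.route and records the positional arguments of
# every call while the block is active; index 1 of each recorded call is
# the routed command name (e.g. "update", "getFile", "streamFile").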
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert "streamFile" not in [req[1] for req in requests]
content = site_temp.storage.loadJson("content.json")
assert "data/img/domain-new.png" in content["files"]
assert "data/img/domain.png" not in content["files"]
assert site_temp.storage.isFile("data/img/domain-new.png")
assert not site_temp.storage.isFile("data/img/domain.png")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testRenameOptional(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.settings["optional_downloaded"] == 0
site_temp.needFile("data/optional.txt")
assert site_temp.settings["optional_downloaded"] > 0
settings_before = site_temp.settings
hashfield_before = site_temp.content_manager.hashfield.tobytes()
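# Renaming an optional file only rewrites its content.json entry; the file
# body and its sha512 are unchanged, so size, size_optional,
# optional_downloaded and the hashfield captured above must stay the same.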
# Rename optional file
os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)
content = site.storage.loadJson("content.json")
assert "data/optional-new.txt" in content["files_optional"]
assert "data/optional.txt" not in content["files_optional"]
assert not site_temp.storage.isFile("data/optional-new.txt")
assert site_temp.storage.isFile("data/optional.txt")
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert "streamFile" not in [req[1] for req in requests]
content = site_temp.storage.loadJson("content.json")
assert "data/optional-new.txt" in content["files_optional"]
assert "data/optional.txt" not in content["files_optional"]
assert site_temp.storage.isFile("data/optional-new.txt")
assert not site_temp.storage.isFile("data/optional.txt")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
assert site_temp.content_manager.hashfield.tobytes() == hashfield_before
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testArchivedDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Download normally
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
# Add archived data
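# user_contents["archived"] maps a user directory address to a timestamp:
# content in that directory whose "modified" time is at or before the
# timestamp counts as archived and is deleted by downloading peers.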
assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
# Push archived update
assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
# The archived content should disappear from the remote client
assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testArchivedBeforeDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Download normally
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
assert not bad_files
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
# Add archived data
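# "archived_before" is a single timestamp covering all of user_contents:
# any user content whose "modified" time is at or before it counts as
# archived, without listing individual user addresses.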
assert not "archived_before" in site.content_manager.contents["data/users/content.json"]["user_contents"]
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later
# Push archived update
assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
# The archived content should disappear from the remote client
assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
# Test when the connected peer has the optional file
def testOptionalDownload(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = ConnectionServer(file_server.ip, 1545)
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
# Download site
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
# Download optional data/optional.txt
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
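# Optional files are skipped by download(); needFile() fetches them on
# demand. A peer's hashfield is a compact summary of which optional file
# hashes it claims to have, checked via hasHash() below.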
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.storage.isFile("data/optional.txt")
assert site.storage.isFile("data/optional.txt")
site_temp.needFile("data/optional.txt")
assert site_temp.storage.isFile("data/optional.txt")
# Optional user file
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
optional_file_info = site_temp.content_manager.getFileInfo(
"data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
)
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
# Test when the connected peer does not have the file, so we ask it if it knows someone who has it
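# Expected flow: site_temp's only connected peer (the source server) no
# longer has the optional files, so the client sends it a findHashIds
# request, learns about the full server on port 1546, and fetches the
# optional files from there.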
def testFindOptional(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init full source server (has optional files)
site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
file_server_full = FileServer(file_server.ip, 1546)
site_full.connection_server = file_server_full
def listen():
ConnectionServer.start(file_server_full)
ConnectionServer.listen(file_server_full)
gevent.spawn(listen)
time.sleep(0.001) # Port opening
file_server_full.sites[site_full.address] = site_full # Add site
site_full.storage.verifyFiles(quick_check=True) # Check optional files
site_full_peer = site.addPeer(file_server.ip, 1546) # Add it to source server
hashfield = site_full_peer.updateHashfield() # Update hashfield
assert len(site_full.content_manager.hashfield) == 8
assert hashfield
assert site_full.storage.isFile("data/optional.txt")
assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert len(site_full_peer.hashfield) == 8
# Remove hashes from source server
for hash in list(site.content_manager.hashfield):
site.content_manager.hashfield.remove(hash)
# Init client server
site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
site_temp.addPeer(file_server.ip, 1544) # Add source server
# Download normal files
site_temp.log.info("Start Downloading site")
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
# Download optional data/optional.txt
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert not site_temp.storage.isFile("data/optional.txt")
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Source server doesn't know it has the file
assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Source server doesn't know it has the file
assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])  # Full peer on source server has the file
assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"])  # Full peer on source server has the file
assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # Full server knows it has the file
assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # Full server knows it has the file
site_temp.log.info("Request optional files")
with Spy.Spy(FileRequest, "route") as requests:
# Request 2 files at the same time
threads = []
threads.append(site_temp.needFile("data/optional.txt", blocking=False))
threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
gevent.joinall(threads)
assert len([request for request in requests if request[1] == "findHashIds"]) == 1  # findHashIds should only be called once
assert site_temp.storage.isFile("data/optional.txt")
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
assert site_temp.storage.deleteFiles()
file_server_full.stop()
[connection.close() for connection in file_server.connections]
site_full.content_manager.contents.db.close("FindOptional test end")
def testUpdate(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Don't try to find peers from the net
site.announce = mock.MagicMock(return_value=True)
site_temp.announce = mock.MagicMock(return_value=True)
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert len(site_temp.bad_files) == 1
# Update file
data_original = site.storage.open("data/data.json").read()
data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json", "wb").write(data_new)
assert site.storage.open("data/data.json").read() == data_new
assert site_temp.storage.open("data/data.json").read() == data_original
site.log.info("Publish new data.json without patch")
# Publish without patch
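# Without diffs attached to the publish, the client cannot patch its copy
# of data.json and must re-download the whole file, hence exactly one
# getFile/streamFile request is asserted below.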
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
site.publish()
time.sleep(0.1)
site.log.info("Downloading site")
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1
assert site_temp.storage.open("data/data.json").read() == data_new
# Close connection to avoid update spam limit
list(site.peers.values())[0].remove()
site.addPeer(file_server.ip, 1545)
list(site_temp.peers.values())[0].ping() # Connect back
time.sleep(0.1)
# Update with patch
data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json-new", "wb").write(data_new)
assert site.storage.open("data/data.json-new").read() == data_new
assert site_temp.storage.open("data/data.json").read() != data_new
# Generate diff
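# A diff is a per-file list of opcodes: ('=', n) keeps the next n bytes of
# the old file, ('-', n) drops n bytes, and ('+', [lines]) inserts the
# given lines. Peers apply it to patch data.json without re-downloading.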
diffs = site.content_manager.getDiffs("content.json")
assert not site.storage.isFile("data/data.json-new") # New data file removed
assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
assert "data/data.json" in diffs
assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
# Publish with patch
site.log.info("Publish new data.json with patch")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
event_done = gevent.event.AsyncResult()
site.publish(diffs=diffs)
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []
assert site_temp.storage.open("data/data.json").read() == data_new
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]
def testBigUpdate(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
# Update file
data_original = site.storage.open("data/data.json").read()
data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
assert data_original != data_new
site.storage.open("data/data.json-new", "wb").write(data_new)
assert site.storage.open("data/data.json-new").read() == data_new
assert site_temp.storage.open("data/data.json").read() != data_new
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
assert not site.storage.isFile("data/data.json-new") # New data file removed
assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
assert "data/data.json" in diffs
content_json = site.storage.loadJson("content.json")
content_json["description"] = "BigZeroBlog" * 1024 * 10
site.storage.writeJson("content.json", content_json)
site.content_manager.loadContent("content.json", force=True)
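# Growing content.json past 10kB makes it too big to be sent inline with
# the update request, so (per the assertion below) the client needs one
# getFile/streamFile call to fetch it, while data.json is still patched
# from the diff.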
# Publish with patch
site.log.info("Publish new data.json with patch")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
assert site.storage.getSize("content.json") > 10 * 1024 # Make it a big content.json
site.publish(diffs=diffs)
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
assert len(file_requests) == 1
assert site_temp.storage.open("data/data.json").read() == data_new
assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
# Test what happens if the site's content.json is bigger than the site limit
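# Both sides raise size_limit to 20MB up front so that a content.json
# bigger than 10MB (asserted below) can still be signed, published and
# accepted by the receiving site.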
def testHugeContentSiteUpdate(self, file_server, site, site_temp):
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
# Connect peers
site_temp.addPeer(file_server.ip, 1544)
# Download site from site to site_temp
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
site_temp.settings["size_limit"] = int(20 * 1024 *1024)
site_temp.saveSettings()
# Raise limit size to 20MB on site so it can be signed
site.settings["size_limit"] = int(20 * 1024 *1024)
site.saveSettings()
content_json = site.storage.loadJson("content.json")
content_json["description"] = "PartirUnJour" * 1024 * 1024
site.storage.writeJson("content.json", content_json)
changed, deleted = site.content_manager.loadContent("content.json", force=True)
# Make sure we have 2 different content.json files
assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
# Generate diff
diffs = site.content_manager.getDiffs("content.json")
# Publish with patch
site.log.info("Publish new content.json bigger than 10MB")
with Spy.Spy(FileRequest, "route") as requests:
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
assert site.storage.getSize("content.json") > 10 * 1024 * 1024  # Verify it's over 10MB
time.sleep(0.1)
site.publish(diffs=diffs)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
def testUnicodeFilename(self, file_server, site, site_temp):
assert site.storage.directory == config.data_dir + "/" + site.address
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
# Init source server
site.connection_server = file_server
file_server.sites[site.address] = site
# Init client server
client = FileServer(file_server.ip, 1545)
client.sites = {site_temp.address: site_temp}
site_temp.connection_server = client
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
site_temp.addPeer(file_server.ip, 1544)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
site.storage.write("data/img/árvíztűrő.png", b"test")
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
content = site.storage.loadJson("content.json")
assert "data/img/árvíztűrő.png" in content["files"]
assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
settings_before = site_temp.settings
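# Unlike the rename tests above, this publishes a genuinely new file, so
# exactly one streamFile transfer is expected; the non-ASCII filename must
# survive the content.json round trip intact.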
with Spy.Spy(FileRequest, "route") as requests:
site.publish()
time.sleep(0.1)
assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download
assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1
content = site_temp.storage.loadJson("content.json")
assert "data/img/árvíztűrő.png" in content["files"]
assert site_temp.storage.isFile("data/img/árvíztűrő.png")
assert site_temp.settings["size"] == settings_before["size"]
assert site_temp.settings["size_optional"] == settings_before["size_optional"]
assert site_temp.storage.deleteFiles()
[connection.close() for connection in file_server.connections]