2015-10-22 11:42:55 +02:00
|
|
|
import time
|
|
|
|
|
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
|
|
|
import pytest
|
|
|
|
import mock
|
2015-10-22 11:42:55 +02:00
|
|
|
import gevent
|
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
|
|
|
|
|
|
|
from Connection import ConnectionServer
|
|
|
|
from Config import config
|
|
|
|
from File import FileRequest
|
2015-10-22 11:42:55 +02:00
|
|
|
from File import FileServer
|
|
|
|
from Site import Site
|
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
|
|
|
import Spy
|
|
|
|
|
2015-10-11 02:22:53 +02:00
|
|
|
|
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
|
|
|
@pytest.mark.usefixtures("resetTempSettings")
@pytest.mark.usefixtures("resetSettings")
class TestSiteDownload:
    def testDownload(self, file_server, site, site_temp):
        """Download a full site into site_temp and verify request priority order and file integrity."""
        file_server.ip_incoming = {}  # Reset flood protection

        # Sanity check: the two fixtures must use separate storage directories
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer("127.0.0.1", 1545)
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        site_temp.addPeer("127.0.0.1", 1544)
        with Spy.Spy(FileRequest, "route") as requests:
            def boostRequest(inner_path):
                # As soon as index.html arrives, queue these files with high priority
                if inner_path == "index.html":
                    print("needFile")
                    site_temp.needFile("data/img/multiuser.png", priority=9, blocking=False)
                    site_temp.needFile("data/img/direct_domains.png", priority=10, blocking=False)
            site_temp.onFileDone.append(boostRequest)
            site_temp.download(blind_includes=True).join(timeout=5)
            file_requests = [request[2]["inner_path"] for request in requests if request[0] in ("getFile", "streamFile")]
            # Test priority
            assert file_requests[0:2] == ["content.json", "index.html"]  # Must-have files
            assert file_requests[2:4] == ["data/img/direct_domains.png", "data/img/multiuser.png"]  # Directly requested files
            assert file_requests[4:6] == ["css/all.css", "js/all.js"]  # Important assets
            assert file_requests[6] == "dbschema.json"  # Database map
            assert "-default" in file_requests[-1]  # Put default files for cloning to the end

        # Check files
        bad_files = site_temp.storage.verifyFiles(quick_check=True)

        # -1 because data/users/1J6... user has invalid cert
        assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1
        assert not bad_files

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]
|
|
|
|
|
|
|
|
# Test when connected peer has the optional file
|
|
|
|
def testOptionalDownload(self, file_server, site, site_temp):
|
|
|
|
file_server.ip_incoming = {} # Reset flood protection
|
|
|
|
|
|
|
|
# Init source server
|
|
|
|
site.connection_server = file_server
|
|
|
|
file_server.sites[site.address] = site
|
|
|
|
|
|
|
|
# Init client server
|
|
|
|
client = ConnectionServer("127.0.0.1", 1545)
|
|
|
|
site_temp.connection_server = client
|
|
|
|
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
|
|
|
|
|
|
|
|
site_temp.addPeer("127.0.0.1", 1544)
|
|
|
|
|
|
|
|
# Download site
|
|
|
|
site_temp.download(blind_includes=True).join(timeout=5)
|
|
|
|
|
|
|
|
# Download optional data/optional.txt
|
|
|
|
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
|
|
|
|
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
|
|
|
|
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
|
|
|
|
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
|
|
|
|
|
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
|
|
|
assert not site_temp.storage.isFile("data/optional.txt")
|
|
|
|
assert site.storage.isFile("data/optional.txt")
|
|
|
|
site_temp.needFile("data/optional.txt")
|
|
|
|
assert site_temp.storage.isFile("data/optional.txt")
|
|
|
|
|
|
|
|
# Optional user file
|
|
|
|
assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
|
2015-10-11 02:22:53 +02:00
|
|
|
optional_file_info = site_temp.content_manager.getFileInfo(
|
|
|
|
"data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
|
|
|
|
)
|
2015-10-22 11:42:55 +02:00
|
|
|
assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
|
2015-10-11 02:22:53 +02:00
|
|
|
assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
|
|
|
|
|
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
|
|
|
site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
|
|
|
|
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
|
2015-10-11 02:22:53 +02:00
|
|
|
assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])
|
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
|
|
|
|
|
|
|
assert site_temp.storage.deleteFiles()
|
2015-10-22 11:42:55 +02:00
|
|
|
[connection.close() for connection in file_server.connections]
|
|
|
|
|
|
|
|
# Test when connected peer does not has the file, so ask him if he know someone who has it
|
|
|
|
def testFindOptional(self, file_server, site, site_temp):
|
|
|
|
file_server.ip_incoming = {} # Reset flood protection
|
|
|
|
|
|
|
|
# Init source server
|
|
|
|
site.connection_server = file_server
|
|
|
|
file_server.sites[site.address] = site
|
|
|
|
|
|
|
|
# Init full source server (has optional files)
|
|
|
|
site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
|
|
|
|
file_server_full = FileServer("127.0.0.1", 1546)
|
|
|
|
site_full.connection_server = file_server_full
|
|
|
|
gevent.spawn(lambda: ConnectionServer.start(file_server_full))
|
2016-03-16 00:34:57 +01:00
|
|
|
time.sleep(0.001) # Port opening
|
2015-10-22 11:42:55 +02:00
|
|
|
file_server_full.sites[site_full.address] = site_full # Add site
|
|
|
|
site_full.storage.verifyFiles(quick_check=True) # Check optional files
|
|
|
|
site_full_peer = site.addPeer("127.0.0.1", 1546) # Add it to source server
|
|
|
|
assert site_full_peer.updateHashfield() # Update hashfield
|
|
|
|
|
|
|
|
# Init client server
|
|
|
|
site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
|
|
|
|
site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net
|
|
|
|
site_temp.addPeer("127.0.0.1", 1544) # Add source server
|
|
|
|
|
|
|
|
# Download normal files
|
|
|
|
site_temp.download(blind_includes=True).join(timeout=5)
|
|
|
|
|
|
|
|
# Download optional data/optional.txt
|
|
|
|
optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
|
|
|
|
assert not site_temp.storage.isFile("data/optional.txt")
|
|
|
|
assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source server don't know he has the file
|
|
|
|
assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"]) # Source full peer on source server has the file
|
|
|
|
assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"]) # Source full server he has the file
|
|
|
|
|
|
|
|
with Spy.Spy(FileRequest, "route") as requests:
|
2015-10-28 01:28:29 +01:00
|
|
|
# Request 2 file same time
|
|
|
|
threads = []
|
|
|
|
threads.append(site_temp.needFile("data/optional.txt", blocking=False))
|
|
|
|
threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
|
|
|
|
gevent.joinall(threads)
|
|
|
|
|
|
|
|
assert len([request for request in requests if request[0] == "findHashIds"]) == 1 # findHashids should call only once
|
|
|
|
|
|
|
|
assert site_temp.storage.isFile("data/optional.txt")
|
|
|
|
assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
|
2015-10-22 11:42:55 +02:00
|
|
|
|
|
|
|
assert site_temp.storage.deleteFiles()
|
|
|
|
file_server_full.stop()
|
|
|
|
[connection.close() for connection in file_server.connections]
|
2016-04-06 14:01:20 +02:00
|
|
|
|
|
|
|
def testUpdate(self, file_server, site, site_temp):
|
|
|
|
file_server.ip_incoming = {} # Reset flood protection
|
|
|
|
|
|
|
|
assert site.storage.directory == config.data_dir + "/" + site.address
|
|
|
|
assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
|
|
|
|
|
|
|
|
# Init source server
|
|
|
|
site.connection_server = file_server
|
|
|
|
file_server.sites[site.address] = site
|
|
|
|
|
|
|
|
# Init client server
|
|
|
|
client = FileServer("127.0.0.1", 1545)
|
|
|
|
client.sites[site_temp.address] = site_temp
|
|
|
|
site_temp.connection_server = client
|
|
|
|
|
|
|
|
# Don't try to find peers from the net
|
|
|
|
site.announce = mock.MagicMock(return_value=True)
|
|
|
|
site_temp.announce = mock.MagicMock(return_value=True)
|
|
|
|
|
|
|
|
# Connect peers
|
|
|
|
site_temp.addPeer("127.0.0.1", 1544)
|
|
|
|
|
|
|
|
# Download site from site to site_temp
|
|
|
|
site_temp.download(blind_includes=True).join(timeout=5)
|
|
|
|
|
|
|
|
# Update file
|
|
|
|
data_original = site.storage.open("data/data.json").read()
|
|
|
|
data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"')
|
|
|
|
assert data_original != data_new
|
|
|
|
|
|
|
|
site.storage.open("data/data.json", "wb").write(data_new)
|
|
|
|
|
|
|
|
assert site.storage.open("data/data.json").read() == data_new
|
|
|
|
assert site_temp.storage.open("data/data.json").read() == data_original
|
|
|
|
|
|
|
|
# Publish without patch
|
|
|
|
with Spy.Spy(FileRequest, "route") as requests:
|
|
|
|
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
|
|
|
|
site.publish()
|
|
|
|
site_temp.download(blind_includes=True).join(timeout=5)
|
|
|
|
assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 1
|
|
|
|
|
|
|
|
assert site_temp.storage.open("data/data.json").read() == data_new
|
|
|
|
|
|
|
|
# Close connection to avoid update spam limit
|
|
|
|
site.peers.values()[0].remove()
|
|
|
|
site.addPeer("127.0.0.1", 1545)
|
|
|
|
site_temp.peers.values()[0].ping() # Connect back
|
|
|
|
time.sleep(0.1)
|
|
|
|
|
|
|
|
# Update with patch
|
|
|
|
data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"')
|
|
|
|
assert data_original != data_new
|
|
|
|
|
|
|
|
site.storage.open("data/data.json-new", "wb").write(data_new)
|
|
|
|
|
|
|
|
assert site.storage.open("data/data.json-new").read() == data_new
|
|
|
|
assert site_temp.storage.open("data/data.json").read() != data_new
|
|
|
|
|
|
|
|
# Generate diff
|
|
|
|
diffs = site.content_manager.getDiffs("content.json")
|
|
|
|
assert not site.storage.isFile("data/data.json-new") # New data file removed
|
|
|
|
assert site.storage.open("data/data.json").read() == data_new # -new postfix removed
|
|
|
|
assert "data/data.json" in diffs
|
|
|
|
assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
|
|
|
|
|
|
|
|
# Publish with patch
|
|
|
|
with Spy.Spy(FileRequest, "route") as requests:
|
|
|
|
site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
|
|
|
|
site.publish(diffs=diffs)
|
|
|
|
site_temp.download(blind_includes=True).join(timeout=5)
|
|
|
|
assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 0
|
|
|
|
|
|
|
|
assert site_temp.storage.open("data/data.json").read() == data_new
|
|
|
|
|
|
|
|
assert site_temp.storage.deleteFiles()
|
|
|
|
[connection.close() for connection in file_server.connections]
|