Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
import time
|
2019-03-15 21:06:59 +01:00
|
|
|
import io
|
2015-09-28 00:22:27 +02:00
|
|
|
|
|
|
|
import pytest
|
|
|
|
|
|
|
|
from File import FileServer
|
Rev536, Fix stats page, Support ranged http requests for better video browser compatibility, setHashfield command, One by one send hashfield to connected peers if changed, Keep count hashfield changetime, PeerHashfield optimalizations, Wait for peers on checkmodification, Give more time to query trackers, Do not count udp trackers as error if udp disabled, Test hashfield push
2015-10-30 02:08:02 +01:00
|
|
|
from File import FileRequest
|
2015-10-11 02:22:53 +02:00
|
|
|
from Crypt import CryptHash
|
2019-03-15 21:06:59 +01:00
|
|
|
from . import Spy
|
2015-09-28 00:22:27 +02:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("resetSettings")
|
|
|
|
@pytest.mark.usefixtures("resetTempSettings")
|
2015-10-22 11:42:55 +02:00
|
|
|
class TestPeer:
|
2015-09-28 00:22:27 +02:00
|
|
|
def testPing(self, file_server, site, site_temp):
|
|
|
|
file_server.sites[site.address] = site
|
2019-01-20 19:07:16 +01:00
|
|
|
client = FileServer(file_server.ip, 1545)
|
2019-11-25 14:34:46 +01:00
|
|
|
client.sites = {site_temp.address: site_temp}
|
2015-09-28 00:22:27 +02:00
|
|
|
site_temp.connection_server = client
|
2019-01-20 19:07:16 +01:00
|
|
|
connection = client.getConnection(file_server.ip, 1544)
|
2015-09-28 00:22:27 +02:00
|
|
|
|
|
|
|
# Add file_server as peer to client
|
2019-01-20 19:07:16 +01:00
|
|
|
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
|
2015-09-28 00:22:27 +02:00
|
|
|
|
2015-09-28 22:07:26 +02:00
|
|
|
assert peer_file_server.ping() is not None
|
2015-09-28 00:22:27 +02:00
|
|
|
|
|
|
|
assert peer_file_server in site_temp.peers.values()
|
|
|
|
peer_file_server.remove()
|
|
|
|
assert peer_file_server not in site_temp.peers.values()
|
|
|
|
|
|
|
|
connection.close()
|
|
|
|
client.stop()
|
|
|
|
|
|
|
|
def testDownloadFile(self, file_server, site, site_temp):
|
|
|
|
file_server.sites[site.address] = site
|
2019-01-20 19:07:16 +01:00
|
|
|
client = FileServer(file_server.ip, 1545)
|
2019-11-25 14:34:46 +01:00
|
|
|
client.sites = {site_temp.address: site_temp}
|
2015-09-28 00:22:27 +02:00
|
|
|
site_temp.connection_server = client
|
2019-01-20 19:07:16 +01:00
|
|
|
connection = client.getConnection(file_server.ip, 1544)
|
2015-09-28 00:22:27 +02:00
|
|
|
|
|
|
|
# Add file_server as peer to client
|
2019-01-20 19:07:16 +01:00
|
|
|
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
|
2015-09-28 00:22:27 +02:00
|
|
|
|
|
|
|
# Testing streamFile
|
2017-10-04 13:32:39 +02:00
|
|
|
buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
|
2019-03-15 21:06:59 +01:00
|
|
|
assert b"sign" in buff.getvalue()
|
2015-09-28 00:22:27 +02:00
|
|
|
|
|
|
|
# Testing getFile
|
|
|
|
buff = peer_file_server.getFile(site_temp.address, "content.json")
|
2019-03-15 21:06:59 +01:00
|
|
|
assert b"sign" in buff.getvalue()
|
2015-09-28 00:22:27 +02:00
|
|
|
|
|
|
|
connection.close()
|
|
|
|
client.stop()
|
2015-10-11 02:22:53 +02:00
|
|
|
|
|
|
|
def testHashfield(self, site):
|
2019-03-15 21:06:59 +01:00
|
|
|
sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]
|
2015-10-22 11:42:55 +02:00
|
|
|
|
2015-10-11 02:22:53 +02:00
|
|
|
site.storage.verifyFiles(quick_check=True) # Find what optional files we have
|
|
|
|
|
|
|
|
# Check if hashfield has any files
|
2015-10-22 11:42:55 +02:00
|
|
|
assert site.content_manager.hashfield
|
2015-10-11 02:22:53 +02:00
|
|
|
assert len(site.content_manager.hashfield) > 0
|
|
|
|
|
|
|
|
# Check exsist hash
|
|
|
|
assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield
|
|
|
|
|
|
|
|
# Add new hash
|
2019-03-15 21:06:59 +01:00
|
|
|
new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
|
2015-10-11 02:22:53 +02:00
|
|
|
assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
|
|
|
|
assert site.content_manager.hashfield.appendHash(new_hash)
|
|
|
|
assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time
|
|
|
|
assert site.content_manager.hashfield.getHashId(new_hash) in site.content_manager.hashfield
|
|
|
|
|
|
|
|
# Remove new hash
|
|
|
|
assert site.content_manager.hashfield.removeHash(new_hash)
|
|
|
|
assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
|
|
|
|
|
|
|
|
    def testHashfieldExchange(self, file_server, site, site_temp):
        """Hashfields sync between two peers via both pull (updateHashfield) and push (sendMyHashfield).

        Also verifies that unchanged hashfields are NOT re-sent: the Spy on
        FileRequest.route counts how many requests actually hit the wire.
        """
        # server1 serves the original site
        server1 = file_server
        server1.sites[site.address] = site
        site.connection_server = server1

        # server2 serves the client-side clone of the site
        server2 = FileServer(file_server.ip, 1545)
        server2.sites[site_temp.address] = site_temp
        site_temp.connection_server = server2
        site.storage.verifyFiles(quick_check=True)  # Find what optional files we have

        # Add file_server as peer to client
        server2_peer1 = site_temp.addPeer(file_server.ip, 1544)

        # Check if hashfield has any files
        assert len(site.content_manager.hashfield) > 0

        # Testing hashfield sync (pull direction)
        assert len(server2_peer1.hashfield) == 0
        assert server2_peer1.updateHashfield()  # Query hashfield from peer
        assert len(server2_peer1.hashfield) > 0

        # Test force push new hashfield (push direction)
        site_temp.content_manager.hashfield.appendHash("AABB")
        server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
        with Spy.Spy(FileRequest, "route") as requests:
            # First push delivers the one new hash
            assert len(server1_peer2.hashfield) == 0
            server2_peer1.sendMyHashfield()
            assert len(server1_peer2.hashfield) == 1
            server2_peer1.sendMyHashfield()  # Hashfield not changed, should be ignored

            assert len(requests) == 1

            time.sleep(0.01)  # To make hashfield change date different

            # A changed hashfield is pushed again (second request on the wire)
            site_temp.content_manager.hashfield.appendHash("AACC")
            server2_peer1.sendMyHashfield()  # Push hashfield

            assert len(server1_peer2.hashfield) == 2
            assert len(requests) == 2

            site_temp.content_manager.hashfield.appendHash("AADD")

            # Forced pull picks up the third hash
            assert server1_peer2.updateHashfield(force=True)  # Request hashfield
            assert len(server1_peer2.hashfield) == 3
            assert len(requests) == 3

            # Nothing changed since the last exchange: no extra request
            assert not server2_peer1.sendMyHashfield()  # Not changed, should be ignored
            assert len(requests) == 3

        server2.stop()
|
2015-10-22 11:42:55 +02:00
|
|
|
|
|
|
|
def testFindHash(self, file_server, site, site_temp):
|
|
|
|
file_server.sites[site.address] = site
|
2019-01-20 19:07:16 +01:00
|
|
|
client = FileServer(file_server.ip, 1545)
|
2019-11-25 14:34:46 +01:00
|
|
|
client.sites = {site_temp.address: site_temp}
|
2015-10-22 11:42:55 +02:00
|
|
|
site_temp.connection_server = client
|
|
|
|
|
|
|
|
# Add file_server as peer to client
|
2019-01-20 19:07:16 +01:00
|
|
|
peer_file_server = site_temp.addPeer(file_server.ip, 1544)
|
2015-10-22 11:42:55 +02:00
|
|
|
|
|
|
|
assert peer_file_server.findHashIds([1234]) == {}
|
|
|
|
|
|
|
|
# Add fake peer with requred hash
|
2019-01-20 19:07:16 +01:00
|
|
|
fake_peer_1 = site.addPeer(file_server.ip_external, 1544)
|
2015-10-22 11:42:55 +02:00
|
|
|
fake_peer_1.hashfield.append(1234)
|
|
|
|
fake_peer_2 = site.addPeer("1.2.3.5", 1545)
|
|
|
|
fake_peer_2.hashfield.append(1234)
|
|
|
|
fake_peer_2.hashfield.append(1235)
|
|
|
|
fake_peer_3 = site.addPeer("1.2.3.6", 1546)
|
|
|
|
fake_peer_3.hashfield.append(1235)
|
|
|
|
fake_peer_3.hashfield.append(1236)
|
|
|
|
|
2019-01-20 19:07:16 +01:00
|
|
|
res = peer_file_server.findHashIds([1234, 1235])
|
|
|
|
assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545)])
|
|
|
|
assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
|
Rev957, Sidebar displays onion peers in graph, Sidebar display bad file retry number, Sidebar site Update/Pause/Delete, Ratelimit sidebar update, Encoded typo, Fix onion findHashId, More retry for bad files, Log file path errors, Testcase for self findhashIds, Testcase for Tor findHashId, Better Tor version parse, UiWebsocket callback on update/pause/resume/delete, Skip invalid postMessage messages
2016-03-09 00:48:57 +01:00
|
|
|
|
|
|
|
# Test my address adding
|
|
|
|
site.content_manager.hashfield.append(1234)
|
|
|
|
|
|
|
|
res = peer_file_server.findHashIds([1234, 1235])
|
2019-01-20 19:07:16 +01:00
|
|
|
assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545), (file_server.ip, 1544)])
|
|
|
|
assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
|