import time

import pytest
import mock
import gevent

from Connection import ConnectionServer
from Config import config
from File import FileRequest
from File import FileServer
from Site.Site import Site
from . import Spy
@pytest.mark.usefixtures ( " resetTempSettings " )
@pytest.mark.usefixtures ( " resetSettings " )
2015-10-11 02:22:53 +02:00
class TestSiteDownload :
2019-03-16 01:01:30 +01:00
def testDownload ( self , file_server , site , site_temp , crypt_bitcoin_lib ) :
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
assert site . storage . directory == config . data_dir + " / " + site . address
assert site_temp . storage . directory == config . data_dir + " -temp/ " + site . address
# Init source server
site . connection_server = file_server
file_server . sites [ site . address ] = site
# Init client server
2019-01-20 19:07:16 +01:00
client = ConnectionServer ( file_server . ip , 1545 )
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
site_temp . connection_server = client
site_temp . announce = mock . MagicMock ( return_value = True ) # Don't try to find peers from the net
2019-01-20 19:07:16 +01:00
site_temp . addPeer ( file_server . ip , 1544 )
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
with Spy . Spy ( FileRequest , " route " ) as requests :
def boostRequest ( inner_path ) :
# I really want these file
if inner_path == " index.html " :
2017-05-07 21:21:26 +02:00
site_temp . needFile ( " data/img/multiuser.png " , priority = 15 , blocking = False )
site_temp . needFile ( " data/img/direct_domains.png " , priority = 15 , blocking = False )
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
site_temp . onFileDone . append ( boostRequest )
site_temp . download ( blind_includes = True ) . join ( timeout = 5 )
2017-10-04 13:31:49 +02:00
file_requests = [ request [ 3 ] [ " inner_path " ] for request in requests if request [ 1 ] in ( " getFile " , " streamFile " ) ]
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
# Test priority
assert file_requests [ 0 : 2 ] == [ " content.json " , " index.html " ] # Must-have files
2019-03-16 00:51:32 +01:00
assert sorted ( file_requests [ 2 : 4 ] ) == [ " data/img/direct_domains.png " , " data/img/multiuser.png " ] # Directly requested files
assert sorted ( file_requests [ 4 : 6 ] ) == [ " css/all.css " , " js/all.js " ] # Important assets
2017-05-07 21:34:44 +02:00
assert file_requests [ 6 ] == " dbschema.json " # Database map
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
assert " -default " in file_requests [ - 1 ] # Put default files for cloning to the end
# Check files
2018-03-29 02:49:06 +02:00
bad_files = site_temp . storage . verifyFiles ( quick_check = True ) [ " bad_files " ]
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
# -1 because data/users/1J6... user has invalid cert
assert len ( site_temp . content_manager . contents ) == len ( site . content_manager . contents ) - 1
assert not bad_files
2019-03-27 03:10:58 +01:00
assert file_server . num_incoming == 2 # One for file_server fixture, one for the test
2016-04-06 14:01:20 +02:00
assert site_temp . storage . deleteFiles ( )
2015-10-22 11:42:55 +02:00
[ connection . close ( ) for connection in file_server . connections ]
2019-03-27 03:10:58 +01:00
2016-08-10 12:59:42 +02:00
def testArchivedDownload ( self , file_server , site , site_temp ) :
# Init source server
site . connection_server = file_server
file_server . sites [ site . address ] = site
# Init client server
2019-01-20 19:07:16 +01:00
client = FileServer ( file_server . ip , 1545 )
2016-08-10 12:59:42 +02:00
client . sites [ site_temp . address ] = site_temp
site_temp . connection_server = client
# Download normally
2019-01-20 19:07:16 +01:00
site_temp . addPeer ( file_server . ip , 1544 )
2016-08-10 12:59:42 +02:00
site_temp . download ( blind_includes = True ) . join ( timeout = 5 )
2018-03-29 02:49:06 +02:00
bad_files = site_temp . storage . verifyFiles ( quick_check = True ) [ " bad_files " ]
2016-09-05 13:58:10 +02:00
2016-08-10 12:59:42 +02:00
assert not bad_files
assert " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " in site_temp . content_manager . contents
assert site_temp . storage . isFile ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " )
assert len ( list ( site_temp . storage . query ( " SELECT * FROM comment " ) ) ) == 2
# Add archived data
assert not " archived " in site . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ]
assert not site . content_manager . isArchived ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " , time . time ( ) - 1 )
site . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ] [ " archived " ] = { " 1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q " : time . time ( ) }
site . content_manager . sign ( " data/users/content.json " , privatekey = " 5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv " )
date_archived = site . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ] [ " archived " ] [ " 1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q " ]
assert site . content_manager . isArchived ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " , date_archived - 1 )
assert site . content_manager . isArchived ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " , date_archived )
assert not site . content_manager . isArchived ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " , date_archived + 1 ) # Allow user to update archived data later
# Push archived update
assert not " archived " in site_temp . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ]
site . publish ( )
2018-02-20 10:18:35 +01:00
time . sleep ( 0.1 )
2016-08-10 12:59:42 +02:00
site_temp . download ( blind_includes = True ) . join ( timeout = 5 ) # Wait for download
# The archived content should disappear from remote client
assert " archived " in site_temp . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ]
assert " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " not in site_temp . content_manager . contents
assert not site_temp . storage . isDir ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q " )
assert len ( list ( site_temp . storage . query ( " SELECT * FROM comment " ) ) ) == 1
assert len ( list ( site_temp . storage . query ( " SELECT * FROM json WHERE directory LIKE ' % 1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q % ' " ) ) ) == 0
assert site_temp . storage . deleteFiles ( )
[ connection . close ( ) for connection in file_server . connections ]
2018-10-15 12:58:39 +02:00
def testArchivedBeforeDownload ( self , file_server , site , site_temp ) :
# Init source server
site . connection_server = file_server
file_server . sites [ site . address ] = site
# Init client server
2019-01-20 19:07:16 +01:00
client = FileServer ( file_server . ip , 1545 )
2018-10-15 12:58:39 +02:00
client . sites [ site_temp . address ] = site_temp
site_temp . connection_server = client
# Download normally
2019-01-20 19:07:16 +01:00
site_temp . addPeer ( file_server . ip , 1544 )
2018-10-15 12:58:39 +02:00
site_temp . download ( blind_includes = True ) . join ( timeout = 5 )
bad_files = site_temp . storage . verifyFiles ( quick_check = True ) [ " bad_files " ]
assert not bad_files
assert " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " in site_temp . content_manager . contents
assert site_temp . storage . isFile ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " )
assert len ( list ( site_temp . storage . query ( " SELECT * FROM comment " ) ) ) == 2
# Add archived data
assert not " archived_before " in site . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ]
assert not site . content_manager . isArchived ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " , time . time ( ) - 1 )
content_modification_time = site . content_manager . contents [ " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " ] [ " modified " ]
site . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ] [ " archived_before " ] = content_modification_time
site . content_manager . sign ( " data/users/content.json " , privatekey = " 5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv " )
date_archived = site . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ] [ " archived_before " ]
assert site . content_manager . isArchived ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " , date_archived - 1 )
assert site . content_manager . isArchived ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " , date_archived )
assert not site . content_manager . isArchived ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " , date_archived + 1 ) # Allow user to update archived data later
# Push archived update
assert not " archived_before " in site_temp . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ]
site . publish ( )
time . sleep ( 0.1 )
site_temp . download ( blind_includes = True ) . join ( timeout = 5 ) # Wait for download
# The archived content should disappear from remote client
assert " archived_before " in site_temp . content_manager . contents [ " data/users/content.json " ] [ " user_contents " ]
assert " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json " not in site_temp . content_manager . contents
assert not site_temp . storage . isDir ( " data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q " )
assert len ( list ( site_temp . storage . query ( " SELECT * FROM comment " ) ) ) == 1
assert len ( list ( site_temp . storage . query ( " SELECT * FROM json WHERE directory LIKE ' % 1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q % ' " ) ) ) == 0
assert site_temp . storage . deleteFiles ( )
[ connection . close ( ) for connection in file_server . connections ]
2015-10-22 11:42:55 +02:00
# Test when connected peer has the optional file
def testOptionalDownload ( self , file_server , site , site_temp ) :
# Init source server
site . connection_server = file_server
file_server . sites [ site . address ] = site
# Init client server
2019-01-20 19:07:16 +01:00
client = ConnectionServer ( file_server . ip , 1545 )
2015-10-22 11:42:55 +02:00
site_temp . connection_server = client
site_temp . announce = mock . MagicMock ( return_value = True ) # Don't try to find peers from the net
2019-01-20 19:07:16 +01:00
site_temp . addPeer ( file_server . ip , 1544 )
2015-10-22 11:42:55 +02:00
# Download site
site_temp . download ( blind_includes = True ) . join ( timeout = 5 )
# Download optional data/optional.txt
site . storage . verifyFiles ( quick_check = True ) # Find what optional files we have
optional_file_info = site_temp . content_manager . getFileInfo ( " data/optional.txt " )
assert site . content_manager . hashfield . hasHash ( optional_file_info [ " sha512 " ] )
assert not site_temp . content_manager . hashfield . hasHash ( optional_file_info [ " sha512 " ] )
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
assert not site_temp . storage . isFile ( " data/optional.txt " )
assert site . storage . isFile ( " data/optional.txt " )
site_temp . needFile ( " data/optional.txt " )
assert site_temp . storage . isFile ( " data/optional.txt " )
# Optional user file
assert not site_temp . storage . isFile ( " data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif " )
2015-10-11 02:22:53 +02:00
optional_file_info = site_temp . content_manager . getFileInfo (
" data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif "
)
2015-10-22 11:42:55 +02:00
assert site . content_manager . hashfield . hasHash ( optional_file_info [ " sha512 " ] )
2015-10-11 02:22:53 +02:00
assert not site_temp . content_manager . hashfield . hasHash ( optional_file_info [ " sha512 " ] )
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
site_temp . needFile ( " data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif " )
assert site_temp . storage . isFile ( " data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif " )
2015-10-11 02:22:53 +02:00
assert site_temp . content_manager . hashfield . hasHash ( optional_file_info [ " sha512 " ] )
Rev467, requirements.txt accept newer dependecies, Boost dbschema.json, Move getDirname getFilename to helper, Verify optional files, Includes not allowed in user files, Optional files rules, Peer hashfield functions, Test optional files signing, Test file info, Test verify file, Test helpers
2015-10-01 01:35:13 +02:00
assert site_temp . storage . deleteFiles ( )
2015-10-22 11:42:55 +02:00
[ connection . close ( ) for connection in file_server . connections ]
# Test when connected peer does not has the file, so ask him if he know someone who has it
def testFindOptional ( self , file_server , site , site_temp ) :
# Init source server
site . connection_server = file_server
file_server . sites [ site . address ] = site
# Init full source server (has optional files)
site_full = Site ( " 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT " )
2019-01-20 19:07:16 +01:00
file_server_full = FileServer ( file_server . ip , 1546 )
2015-10-22 11:42:55 +02:00
site_full . connection_server = file_server_full
2018-04-29 03:05:36 +02:00
def listen ( ) :
ConnectionServer . start ( file_server_full )
ConnectionServer . listen ( file_server_full )
gevent . spawn ( listen )
2016-03-16 00:34:57 +01:00
time . sleep ( 0.001 ) # Port opening
2015-10-22 11:42:55 +02:00
file_server_full . sites [ site_full . address ] = site_full # Add site
site_full . storage . verifyFiles ( quick_check = True ) # Check optional files
2019-01-20 19:07:16 +01:00
site_full_peer = site . addPeer ( file_server . ip , 1546 ) # Add it to source server
2016-11-07 22:44:03 +01:00
hashfield = site_full_peer . updateHashfield ( ) # Update hashfield
assert len ( site_full . content_manager . hashfield ) == 8
assert hashfield
2016-09-05 13:58:10 +02:00
assert site_full . storage . isFile ( " data/optional.txt " )
assert site_full . storage . isFile ( " data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif " )
assert len ( site_full_peer . hashfield ) == 8
# Remove hashes from source server
for hash in list ( site . content_manager . hashfield ) :
site . content_manager . hashfield . remove ( hash )
2015-10-22 11:42:55 +02:00
# Init client server
2019-01-20 19:07:16 +01:00
site_temp . connection_server = ConnectionServer ( file_server . ip , 1545 )
site_temp . addPeer ( file_server . ip , 1544 ) # Add source server
2015-10-22 11:42:55 +02:00
# Download normal files
2016-09-05 13:58:10 +02:00
site_temp . log . info ( " Start Downloading site " )
2015-10-22 11:42:55 +02:00
site_temp . download ( blind_includes = True ) . join ( timeout = 5 )
# Download optional data/optional.txt
optional_file_info = site_temp . content_manager . getFileInfo ( " data/optional.txt " )
2016-09-05 13:58:10 +02:00
optional_file_info2 = site_temp . content_manager . getFileInfo ( " data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif " )
2015-10-22 11:42:55 +02:00
assert not site_temp . storage . isFile ( " data/optional.txt " )
2016-09-05 13:58:10 +02:00
assert not site_temp . storage . isFile ( " data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif " )
2015-10-22 11:42:55 +02:00
assert not site . content_manager . hashfield . hasHash ( optional_file_info [ " sha512 " ] ) # Source server don't know he has the file
2016-09-05 13:58:10 +02:00
assert not site . content_manager . hashfield . hasHash ( optional_file_info2 [ " sha512 " ] ) # Source server don't know he has the file
2015-10-22 11:42:55 +02:00
assert site_full_peer . hashfield . hasHash ( optional_file_info [ " sha512 " ] ) # Source full peer on source server has the file
2016-09-05 13:58:10 +02:00
assert site_full_peer . hashfield . hasHash ( optional_file_info2 [ " sha512 " ] ) # Source full peer on source server has the file
2015-10-22 11:42:55 +02:00
assert site_full . content_manager . hashfield . hasHash ( optional_file_info [ " sha512 " ] ) # Source full server he has the file
2016-09-05 13:58:10 +02:00
assert site_full . content_manager . hashfield . hasHash ( optional_file_info2 [ " sha512 " ] ) # Source full server he has the file
2015-10-22 11:42:55 +02:00
2016-09-05 13:58:10 +02:00
site_temp . log . info ( " Request optional files " )
2015-10-22 11:42:55 +02:00
with Spy . Spy ( FileRequest , " route " ) as requests :
2015-10-28 01:28:29 +01:00
# Request 2 file same time
threads = [ ]
threads . append ( site_temp . needFile ( " data/optional.txt " , blocking = False ) )
threads . append ( site_temp . needFile ( " data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif " , blocking = False ) )
gevent . joinall ( threads )
2017-10-04 13:31:49 +02:00
assert len ( [ request for request in requests if request [ 1 ] == " findHashIds " ] ) == 1 # findHashids should call only once
2015-10-28 01:28:29 +01:00
assert site_temp . storage . isFile ( " data/optional.txt " )
assert site_temp . storage . isFile ( " data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif " )
2015-10-22 11:42:55 +02:00
assert site_temp . storage . deleteFiles ( )
file_server_full . stop ( )
[ connection . close ( ) for connection in file_server . connections ]
2019-01-20 19:07:16 +01:00
site_full . content_manager . contents . db . close ( )
2016-04-06 14:01:20 +02:00
def testUpdate ( self , file_server , site , site_temp ) :
assert site . storage . directory == config . data_dir + " / " + site . address
assert site_temp . storage . directory == config . data_dir + " -temp/ " + site . address
# Init source server
site . connection_server = file_server
file_server . sites [ site . address ] = site
# Init client server
2019-01-20 19:07:16 +01:00
client = FileServer ( file_server . ip , 1545 )
2016-04-06 14:01:20 +02:00
client . sites [ site_temp . address ] = site_temp
site_temp . connection_server = client
# Don't try to find peers from the net
site . announce = mock . MagicMock ( return_value = True )
site_temp . announce = mock . MagicMock ( return_value = True )
# Connect peers
2019-01-20 19:07:16 +01:00
site_temp . addPeer ( file_server . ip , 1544 )
2016-04-06 14:01:20 +02:00
# Download site from site to site_temp
site_temp . download ( blind_includes = True ) . join ( timeout = 5 )
# Update file
data_original = site . storage . open ( " data/data.json " ) . read ( )
2019-03-15 21:06:59 +01:00
data_new = data_original . replace ( b ' " ZeroBlog " ' , b ' " UpdatedZeroBlog " ' )
2016-04-06 14:01:20 +02:00
assert data_original != data_new
site . storage . open ( " data/data.json " , " wb " ) . write ( data_new )
assert site . storage . open ( " data/data.json " ) . read ( ) == data_new
assert site_temp . storage . open ( " data/data.json " ) . read ( ) == data_original
2016-09-05 13:58:10 +02:00
site . log . info ( " Publish new data.json without patch " )
2016-04-06 14:01:20 +02:00
# Publish without patch
with Spy . Spy ( FileRequest , " route " ) as requests :
site . content_manager . sign ( " content.json " , privatekey = " 5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv " )
site . publish ( )
2016-09-05 13:58:10 +02:00
time . sleep ( 0.1 )
2016-04-06 14:01:20 +02:00
site_temp . download ( blind_includes = True ) . join ( timeout = 5 )
2017-10-04 13:31:49 +02:00
assert len ( [ request for request in requests if request [ 1 ] in ( " getFile " , " streamFile " ) ] ) == 1
2016-04-06 14:01:20 +02:00
assert site_temp . storage . open ( " data/data.json " ) . read ( ) == data_new
# Close connection to avoid update spam limit
2019-03-15 21:06:59 +01:00
list ( site . peers . values ( ) ) [ 0 ] . remove ( )
2019-01-20 19:07:16 +01:00
site . addPeer ( file_server . ip , 1545 )
2019-03-15 21:06:59 +01:00
list ( site_temp . peers . values ( ) ) [ 0 ] . ping ( ) # Connect back
2016-04-06 14:01:20 +02:00
time . sleep ( 0.1 )
# Update with patch
2019-03-15 21:06:59 +01:00
data_new = data_original . replace ( b ' " ZeroBlog " ' , b ' " PatchedZeroBlog " ' )
2016-04-06 14:01:20 +02:00
assert data_original != data_new
site . storage . open ( " data/data.json-new " , " wb " ) . write ( data_new )
assert site . storage . open ( " data/data.json-new " ) . read ( ) == data_new
assert site_temp . storage . open ( " data/data.json " ) . read ( ) != data_new
# Generate diff
diffs = site . content_manager . getDiffs ( " content.json " )
assert not site . storage . isFile ( " data/data.json-new " ) # New data file removed
assert site . storage . open ( " data/data.json " ) . read ( ) == data_new # -new postfix removed
assert " data/data.json " in diffs
2019-03-15 21:06:59 +01:00
assert diffs [ " data/data.json " ] == [ ( ' = ' , 2 ) , ( ' - ' , 29 ) , ( ' + ' , [ b ' \t " title " : " PatchedZeroBlog " , \n ' ] ) , ( ' = ' , 31102 ) ]
2016-04-06 14:01:20 +02:00
# Publish with patch
2016-09-05 13:58:10 +02:00
site . log . info ( " Publish new data.json with patch " )
2016-04-06 14:01:20 +02:00
with Spy . Spy ( FileRequest , " route " ) as requests :
site . content_manager . sign ( " content.json " , privatekey = " 5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv " )
site . publish ( diffs = diffs )
site_temp . download ( blind_includes = True ) . join ( timeout = 5 )
assert len ( [ request for request in requests if request [ 0 ] in ( " getFile " , " streamFile " ) ] ) == 0
assert site_temp . storage . open ( " data/data.json " ) . read ( ) == data_new
assert site_temp . storage . deleteFiles ( )
[ connection . close ( ) for connection in file_server . connections ]