peer-to-peer-network 2020-09-12 21:37:21 -04:00 committed by GitHub
commit 04c459dec0
9 changed files with 86 additions and 103 deletions

View File

@ -14,7 +14,7 @@ before_install:
# - docker build -t zeronet .
# - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet
install:
- pip install --upgrade -r requirements.txt
- pip install --upgrade --no-cache-dir -r requirements.txt
- pip list
before_script:
- openssl version -a

View File

@ -101,8 +101,8 @@ class UiRequestPlugin(object):
read, upload_info["size"], upload_info["piece_size"], out_file
)
if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split
hash = binascii.hexlify(piecemap_info["sha512_pieces"][0])
if len(piecemap_info["blake3_pieces"]) == 1: # Small file, don't split
hash = binascii.hexlify(piecemap_info["blake3_pieces"][0])
hash_id = site.content_manager.hashfield.getHashId(hash)
site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True)
@ -125,7 +125,7 @@ class UiRequestPlugin(object):
content["files_optional"] = {}
content["files_optional"][file_relative_path] = {
"sha512": merkle_root,
"blake3": merkle_root,
"size": upload_info["size"],
"piecemap": piecemap_relative_path,
"piece_size": piece_size
@ -139,7 +139,7 @@ class UiRequestPlugin(object):
return {
"merkle_root": merkle_root,
"piece_num": len(piecemap_info["sha512_pieces"]),
"piece_num": len(piecemap_info["blake3_pieces"]),
"piece_size": piece_size,
"inner_path": inner_path
}
@ -283,12 +283,12 @@ class ContentManagerPlugin(object):
recv = 0
try:
piece_hash = CryptHash.sha512t()
piece_hash = CryptHash.blake3t()
piece_hashes = []
piece_recv = 0
mt = merkletools.MerkleTools()
mt.hash_function = CryptHash.sha512t
mt.hash_function = CryptHash.blake3t
part = ""
for part in self.readFile(read_func, size):
@ -302,7 +302,7 @@ class ContentManagerPlugin(object):
piece_digest = piece_hash.digest()
piece_hashes.append(piece_digest)
mt.leaves.append(piece_digest)
piece_hash = CryptHash.sha512t()
piece_hash = CryptHash.blake3t()
piece_recv = 0
if len(piece_hashes) % 100 == 0 or recv == size:
@ -325,7 +325,7 @@ class ContentManagerPlugin(object):
if type(merkle_root) is bytes: # Python <3.5
merkle_root = merkle_root.decode()
return merkle_root, piece_size, {
"sha512_pieces": piece_hashes
"blake3_pieces": piece_hashes
}
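
These hunks are the heart of the change: big files are read in fixed-size pieces, each piece is hashed with BLAKE3 instead of truncated SHA-512, and a merkle root is built over the piece digests. A minimal standalone sketch of that flow, assuming the blake3 and merkletools packages from requirements.txt (the hash_pieces helper is hypothetical, not part of the plugin):

import blake3
import merkletools

def hash_pieces(data, piece_size=1024 * 1024):
    # Hash each fixed-size piece with BLAKE3 and collect the raw digests,
    # mirroring the streaming loop in hashBigfile above.
    mt = merkletools.MerkleTools()
    mt.hash_function = blake3.blake3  # same override the plugin applies
    piece_hashes = []
    for pos in range(0, max(len(data), 1), piece_size):
        digest = blake3.blake3(data[pos:pos + piece_size]).digest()
        piece_hashes.append(digest)
        mt.leaves.append(digest)  # merkletools keeps leaves as raw digests
    mt.make_tree()
    return mt.get_merkle_root(), piece_size, {"blake3_pieces": piece_hashes}

Because BLAKE3 digests are 32 bytes by default, the piecemap entries keep the same width as the old truncated sha512t digests.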
def hashFile(self, dir_inner_path, file_relative_path, optional=False):
@ -348,7 +348,7 @@ class ContentManagerPlugin(object):
file_node = content["files_optional"][file_relative_path]
if file_node["size"] == file_size:
self.log.info("- [SAME SIZE] %s" % file_relative_path)
hash = file_node.get("sha512")
hash = file_node.get("blake3")
piecemap_relative_path = file_node.get("piecemap")
piece_size = file_node.get("piece_size")
@ -377,7 +377,7 @@ class ContentManagerPlugin(object):
self.optionalDownloaded(inner_path, hash_id, file_size, own=True)
self.site.storage.piecefields[hash].frombytes(b"\x01" * piece_num)
back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size}
back[file_relative_path] = {"blake3": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size}
return back
def getPiecemap(self, inner_path):
@ -395,7 +395,7 @@ class ContentManagerPlugin(object):
raise VerifyError("Unable to download piecemap: %s" % Debug.formatException(err))
piece_i = int(pos / piecemap["piece_size"])
if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]:
if CryptHash.b3sum(piece, format="digest") != piecemap["blake3_pieces"][piece_i]:
raise VerifyError("Invalid hash")
return True
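
Verification is the mirror image: the byte offset selects the piece index, and the downloaded piece must hash to the recorded digest. A condensed sketch of the check above (function name hypothetical):

import blake3

def verify_piece(piece, pos, piecemap):
    # Which piece does this byte offset belong to?
    piece_i = pos // piecemap["piece_size"]
    # Compare against the digest recorded in the piecemap.
    return blake3.blake3(piece).digest() == piecemap["blake3_pieces"][piece_i]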
@ -416,16 +416,16 @@ class ContentManagerPlugin(object):
# Mark piece downloaded
piece_i = int(pos_from / file_info["piece_size"])
self.site.storage.piecefields[file_info["sha512"]][piece_i] = b"\x01"
self.site.storage.piecefields[file_info["blake3"]][piece_i] = b"\x01"
# Only add to site size on first request
if hash_id in self.hashfield:
size = 0
elif size > 1024 * 1024:
file_info = self.getFileInfo(inner_path)
if file_info and "sha512" in file_info: # We already have the file, but not in piecefield
sha512 = file_info["sha512"]
if sha512 not in self.site.storage.piecefields:
if file_info and "blake3" in file_info: # We already have the file, but not in piecefield
blake3 = file_info["blake3"]
if blake3 not in self.site.storage.piecefields:
self.site.storage.checkBigfile(inner_path)
return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own)
@ -433,9 +433,9 @@ class ContentManagerPlugin(object):
def optionalRemoved(self, inner_path, hash_id, size=None):
if size and size > 1024 * 1024:
file_info = self.getFileInfo(inner_path)
sha512 = file_info["sha512"]
if sha512 in self.site.storage.piecefields:
del self.site.storage.piecefields[sha512]
blake3 = file_info["blake3"]
if blake3 in self.site.storage.piecefields:
del self.site.storage.piecefields[blake3]
# Also remove other pieces of the file from download queue
for key in list(self.site.bad_files.keys()):
@ -451,12 +451,12 @@ class SiteStoragePlugin(object):
super(SiteStoragePlugin, self).__init__(*args, **kwargs)
self.piecefields = collections.defaultdict(BigfilePiecefield)
if "piecefields" in self.site.settings.get("cache", {}):
for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
for blake3, piecefield_packed in self.site.settings["cache"].get("piecefields").items():
if piecefield_packed:
self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed))
self.piecefields[blake3].unpack(base64.b64decode(piecefield_packed))
self.site.settings["cache"]["piecefields"] = {}
def createSparseFile(self, inner_path, size, sha512=None):
def createSparseFile(self, inner_path, size, blake3=None):
file_path = self.getPath(inner_path)
self.ensureDir(os.path.dirname(inner_path))
@ -469,9 +469,9 @@ class SiteStoragePlugin(object):
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.call(["fsutil", "sparse", "setflag", file_path], close_fds=True, startupinfo=startupinfo)
if sha512 and sha512 in self.piecefields:
if blake3 and blake3 in self.piecefields:
self.log.debug("%s: File not exists, but has piecefield. Deleting piecefield." % inner_path)
del self.piecefields[sha512]
del self.piecefields[blake3]
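
createSparseFile reserves the full file size up front without allocating disk blocks, so pieces can be written at arbitrary offsets as they arrive. A hedged cross-platform sketch of the same idea (helper name hypothetical):

import subprocess
import sys

def create_sparse_file(path, size):
    # Truncating an empty file to `size` creates a hole on most filesystems.
    with open(path, "wb") as f:
        f.truncate(size)
    if sys.platform == "win32":
        # NTFS wants the sparse flag set explicitly, as the plugin does above.
        subprocess.call(["fsutil", "sparse", "setflag", path])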
def write(self, inner_path, content):
if "|" not in inner_path:
@ -506,20 +506,20 @@ class SiteStoragePlugin(object):
self.site.settings["has_bigfile"] = True
file_path = self.getPath(inner_path)
sha512 = file_info["sha512"]
blake3 = file_info["blake3"]
piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"]))
if os.path.isfile(file_path):
if sha512 not in self.piecefields:
if blake3 not in self.piecefields:
if open(file_path, "rb").read(128) == b"\0" * 128:
piece_data = b"\x00"
else:
piece_data = b"\x01"
self.log.debug("%s: File exists, but not in piecefield. Filling piecefiled with %s * %s." % (inner_path, piece_num, piece_data))
self.piecefields[sha512].frombytes(piece_data * piece_num)
self.piecefields[blake3].frombytes(piece_data * piece_num)
else:
self.log.debug("Creating bigfile: %s" % inner_path)
self.createSparseFile(inner_path, file_info["size"], sha512)
self.piecefields[sha512].frombytes(b"\x00" * piece_num)
self.createSparseFile(inner_path, file_info["size"], blake3)
self.piecefields[blake3].frombytes(b"\x00" * piece_num)
self.log.debug("Created bigfile: %s" % inner_path)
return True
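
checkBigfile rebuilds a missing piecefield with a cheap heuristic: if the file exists and its first bytes are non-zero, assume every piece is present; otherwise assume none. A sketch of that logic (helper name hypothetical):

import math
import os

def init_piecefield(file_path, size, piece_size):
    piece_num = int(math.ceil(float(size) / piece_size))
    if os.path.isfile(file_path):
        with open(file_path, "rb") as f:
            downloaded = f.read(128) != b"\0" * 128
        return (b"\x01" if downloaded else b"\x00") * piece_num
    return b"\x00" * piece_num  # sparse file: nothing downloaded yet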
@ -537,12 +537,12 @@ class BigFile(object):
file_path = site.storage.getPath(inner_path)
file_info = self.site.content_manager.getFileInfo(inner_path)
self.piece_size = file_info["piece_size"]
self.sha512 = file_info["sha512"]
self.blake3 = file_info["blake3"]
self.size = file_info["size"]
self.prebuffer = prebuffer
self.read_bytes = 0
self.piecefield = self.site.storage.piecefields[self.sha512]
self.piecefield = self.site.storage.piecefields[self.blake3]
self.f = open(file_path, "rb+")
self.read_lock = gevent.lock.Semaphore()
@ -636,7 +636,7 @@ class WorkerManagerPlugin(object):
inner_path, file_range = inner_path.split("|")
pos_from, pos_to = map(int, file_range.split("-"))
task["piece_i"] = int(pos_from / file_info["piece_size"])
task["sha512"] = file_info["sha512"]
task["blake3"] = file_info["blake3"]
else:
if inner_path in self.site.bad_files:
del self.site.bad_files[inner_path]
@ -648,17 +648,17 @@ class WorkerManagerPlugin(object):
task = {"evt": fake_evt}
if not self.site.storage.isFile(inner_path):
self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["sha512"])
self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["blake3"])
piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"]))
self.site.storage.piecefields[file_info["sha512"]].frombytes(b"\x00" * piece_num)
self.site.storage.piecefields[file_info["blake3"]].frombytes(b"\x00" * piece_num)
else:
task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs)
return task
def taskAddPeer(self, task, peer):
if "piece_i" in task:
if not peer.piecefields[task["sha512"]][task["piece_i"]]:
if task["sha512"] not in peer.piecefields:
if not peer.piecefields[task["blake3"]][task["piece_i"]]:
if task["blake3"] not in peer.piecefields:
gevent.spawn(peer.updatePiecefields, force=True)
elif not task["peers"]:
gevent.spawn(peer.updatePiecefields)
@ -676,7 +676,7 @@ class FileRequestPlugin(object):
file_info = site.content_manager.getFileInfo(inner_path)
if "piece_size" in file_info:
piece_i = int(pos / file_info["piece_size"])
if not site.storage.piecefields[file_info["sha512"]][piece_i]:
if not site.storage.piecefields[file_info["blake3"]][piece_i]:
return False
# Seek back to position we want to read
file.seek(pos)
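
The guard above refuses to stream a byte range unless the piece containing the requested offset is marked downloaded locally. As a condensed sketch (function hypothetical; piecefields here is any mapping of BLAKE3 hash to a per-piece sequence):

def can_stream(piecefields, file_info, pos):
    # Map the byte offset to a piece index and check the local piecefield,
    # now keyed by the file's BLAKE3 hash.
    piece_i = pos // file_info["piece_size"]
    return bool(piecefields[file_info["blake3"]][piece_i])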
@ -693,7 +693,7 @@ class FileRequestPlugin(object):
if not peer.connection: # Just added
peer.connect(self.connection) # Assign current connection to peer
piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()}
piecefields_packed = {blake3: piecefield.pack() for blake3, piecefield in site.storage.piecefields.items()}
self.response({"piecefields_packed": piecefields_packed})
def actionSetPiecefields(self, params):
@ -709,8 +709,8 @@ class FileRequestPlugin(object):
peer.connect(self.connection)
peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
for sha512, piecefield_packed in params["piecefields_packed"].items():
peer.piecefields[sha512].unpack(piecefield_packed)
for blake3, piecefield_packed in params["piecefields_packed"].items():
peer.piecefields[blake3].unpack(piecefield_packed)
site.settings["has_bigfile"] = True
self.response({"ok": "Updated"})
@ -744,8 +744,8 @@ class PeerPlugin(object):
self.piecefields = collections.defaultdict(BigfilePiecefieldPacked)
try:
for sha512, piecefield_packed in res["piecefields_packed"].items():
self.piecefields[sha512].unpack(piecefield_packed)
for blake3, piecefield_packed in res["piecefields_packed"].items():
self.piecefields[blake3].unpack(piecefield_packed)
except Exception as err:
self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err))
@ -791,7 +791,7 @@ class SitePlugin(object):
def getSettingsCache(self):
back = super(SitePlugin, self).getSettingsCache()
if self.storage.piecefields:
back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()}
back["piecefields"] = {blake3: base64.b64encode(piecefield.pack()).decode("utf8") for blake3, piecefield in self.storage.piecefields.items()}
return back
def needFile(self, inner_path, *args, **kwargs):
@ -817,7 +817,7 @@ class SitePlugin(object):
file_threads = []
piecefield = self.storage.piecefields.get(file_info["sha512"])
piecefield = self.storage.piecefields.get(file_info["blake3"])
for piece_i in range(piece_num):
piece_from = piece_i * piece_size

View File

@ -1,5 +1,5 @@
gevent>=1.1.0; python_version < "3.8"
gevent>=1.5.0; python_version >= "3.8"
blake3
gevent>=1.4.0
msgpack>=0.4.4
base58
merkletools
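
The new blake3 requirement is presumably the Rust-backed Python bindings. One practical property the rest of the diff relies on: BLAKE3's default digest is 32 bytes, so hashes keep exactly the width of the old truncated SHA-512 (sha512t) and content.json fields don't grow. A quick sanity check, assuming those bindings:

import hashlib
import blake3

data = b"hello"
b3 = blake3.blake3(data)
print(b3.hexdigest())        # 64 hex chars, same width as sha512t
assert len(b3.digest()) == 32
assert len(hashlib.sha512(data).digest()[:32]) == len(b3.digest())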

View File

@ -153,7 +153,7 @@ class Config(object):
default="content.json", metavar="inner_path")
# SiteVerify
action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
action = self.subparsers.add_parser("siteVerify", help='Verify site files using BLAKE3: address')
action.add_argument('address', help='Site to verify')
# SiteCmd

View File

@ -56,11 +56,11 @@ class ContentManager(object):
def getFileChanges(self, old_files, new_files):
deleted = {key: val for key, val in old_files.items() if key not in new_files}
deleted_hashes = {val.get("sha512"): key for key, val in old_files.items() if key not in new_files}
deleted_hashes = {val.get("blake3"): key for key, val in old_files.items() if key not in new_files}
added = {key: val for key, val in new_files.items() if key not in old_files}
renamed = {}
for relative_path, node in added.items():
hash = node.get("sha512")
hash = node.get("blake3")
if hash in deleted_hashes:
relative_path_old = deleted_hashes[hash]
renamed[relative_path_old] = relative_path
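
Content-addressed rename detection falls out of the hash switch for free: a path that disappears from the old content.json and reappears under a new name with the same BLAKE3 hash is treated as a rename, not a delete plus re-download. A condensed sketch with illustrative data:

old_files = {"a.bin": {"blake3": "abc123", "size": 3}}
new_files = {"b.bin": {"blake3": "abc123", "size": 3}}

deleted_hashes = {v.get("blake3"): k for k, v in old_files.items() if k not in new_files}
renamed = {}
for path, node in new_files.items():
    if path not in old_files and node.get("blake3") in deleted_hashes:
        renamed[deleted_hashes[node.get("blake3")]] = path

print(renamed)  # {'a.bin': 'b.bin'}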
@ -97,15 +97,13 @@ class ContentManager(object):
return [], [] # Content.json not exist
try:
# Get the files where the sha512 changed
# Get the files where the BLAKE3 hash has changed
changed = []
deleted = []
# Check changed
for relative_path, info in new_content.get("files", {}).items():
if "sha512" in info:
hash_type = "sha512"
else: # Backward compatibility
hash_type = "sha1"
if "blake3" in info:
hash_type = "blake3"
new_hash = info[hash_type]
if old_content and old_content["files"].get(relative_path): # We have the file in the old content
@ -118,10 +116,10 @@ class ContentManager(object):
# Check changed optional files
for relative_path, info in new_content.get("files_optional", {}).items():
file_inner_path = content_inner_dir + relative_path
new_hash = info["sha512"]
new_hash = info["blake3"]
if old_content and old_content.get("files_optional", {}).get(relative_path):
# We have the file in the old content
old_hash = old_content["files_optional"][relative_path].get("sha512")
old_hash = old_content["files_optional"][relative_path].get("blake3")
if old_hash != new_hash and self.site.isDownloadable(file_inner_path):
changed.append(file_inner_path) # Download new file
elif old_hash != new_hash and self.hashfield.hasHash(old_hash) and not self.site.settings.get("own"):
@ -171,7 +169,7 @@ class ContentManager(object):
# Check if the deleted file is optional
if old_content.get("files_optional") and old_content["files_optional"].get(file_relative_path):
self.optionalDelete(file_inner_path)
old_hash = old_content["files_optional"][file_relative_path].get("sha512")
old_hash = old_content["files_optional"][file_relative_path].get("blake3")
if self.hashfield.hasHash(old_hash):
old_hash_id = self.hashfield.getHashId(old_hash)
self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][file_relative_path]["size"])
@ -372,9 +370,9 @@ class ContentManager(object):
def isDownloaded(self, inner_path, hash_id=None):
if not hash_id:
file_info = self.getFileInfo(inner_path)
if not file_info or "sha512" not in file_info:
if not file_info or "blake3" not in file_info:
return False
hash_id = self.hashfield.getHashId(file_info["sha512"])
hash_id = self.hashfield.getHashId(file_info["blake3"])
return hash_id in self.hashfield
# Is modified since signing
@ -398,7 +396,7 @@ class ContentManager(object):
return is_modified
# Find the file info line from self.contents
# Return: { "sha512": "c29d73d...21f518", "size": 41 , "content_inner_path": "content.json"}
# Return: { "blake3": "156649c396d71ae7aec24b0c708bea0f1b7b8adf633ea69804beba0e2db91a06", "size": 41 , "content_inner_path": "content.json"}
def getFileInfo(self, inner_path, new_file=False):
dirs = inner_path.split("/") # Parent dirs of content.json
inner_path_parts = [dirs.pop()] # Filename relative to content.json
@ -587,11 +585,11 @@ class ContentManager(object):
file_path = self.site.storage.getPath(file_inner_path)
file_size = os.path.getsize(file_path)
sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file
if optional and not self.hashfield.hasHash(sha512sum):
self.optionalDownloaded(file_inner_path, self.hashfield.getHashId(sha512sum), file_size, own=True)
b3sum = CryptHash.b3sum(file_path) # Calculate BLAKE3 sum of file
if optional and not self.hashfield.hasHash(b3sum):
self.optionalDownloaded(file_inner_path, self.hashfield.getHashId(b3sum), file_size, own=True)
back[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)}
back[file_relative_path] = {"blake3": b3sum, "size": os.path.getsize(file_path)}
return back
def isValidRelativePath(self, relative_path):
@ -677,7 +675,7 @@ class ContentManager(object):
content = {"files": {}, "signs": {}} # Default content.json
if inner_path == "content.json": # It's the root content.json, add some more fields
content["title"] = "%s - ZeroNet_" % self.site.address
content["title"] = self.site.address
content["description"] = ""
content["signs_required"] = 1
content["ignore"] = ""
@ -707,8 +705,8 @@ class ContentManager(object):
files_merged = files_node.copy()
files_merged.update(files_optional_node)
for file_relative_path, file_details in files_merged.items():
old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512")
new_hash = files_merged[file_relative_path]["sha512"]
old_hash = content.get("files", {}).get(file_relative_path, {}).get("blake3")
new_hash = files_merged[file_relative_path]["blake3"]
if old_hash != new_hash:
changed_files.append(inner_directory + file_relative_path)
@ -718,10 +716,10 @@ class ContentManager(object):
self.site.storage.onUpdated(file_path)
# Generate new content.json
self.log.info("Adding timestamp and sha512sums to new content.json...")
self.log.info("Adding timestamp and b3sum to new content.json...")
new_content = content.copy() # Create a copy of current content.json
new_content["files"] = files_node # Add files sha512 hash
new_content["files"] = files_node # Add files BLAKE2 hash
if files_optional_node:
new_content["files_optional"] = files_optional_node
elif "files_optional" in new_content:
@ -1005,10 +1003,10 @@ class ContentManager(object):
self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err)))
raise err
else: # Check using sha512 hash
else: # Check using BLAKE3 hash
file_info = self.getFileInfo(inner_path)
if file_info:
if CryptHash.sha512sum(file) != file_info.get("sha512", ""):
if CryptHash.b3sum(file) != file_info.get("blake3", ""):
raise VerifyError("Invalid hash")
if file_info.get("size", 0) != file.tell():

View File

@ -1,56 +1,41 @@
import hashlib
import blake3
import os
import base64
def sha512sum(file, blocksize=65536, format="hexdigest"):
def b3sum(file, blocksize=65536, format="hexdigest"):
if type(file) is str: # Filename specified
file = open(file, "rb")
hash = hashlib.sha512()
hash = blake3.blake3()
for block in iter(lambda: file.read(blocksize), b""):
hash.update(block)
# BLAKE3 digests are 256 bits by default, so the old truncated width is unchanged
if format == "hexdigest":
return hash.hexdigest()[0:64]
else:
return hash.digest()[0:32]
def sha256sum(file, blocksize=65536):
if type(file) is str: # Filename specified
file = open(file, "rb")
hash = hashlib.sha256()
for block in iter(lambda: file.read(blocksize), b""):
hash.update(block)
return hash.hexdigest()
def random(length=64, encoding="hex"):
if encoding == "base64": # Characters: A-Za-z0-9
hash = hashlib.sha512(os.urandom(256)).digest()
hash = blake3.blake3(os.urandom(256)).digest(length=64) # 64-byte digest keeps the base64 output long enough for length=64
return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length]
else: # Characters: a-f0-9 (faster)
return hashlib.sha512(os.urandom(256)).hexdigest()[0:length]
return blake3.blake3(os.urandom(256)).hexdigest()[0:length]
# Sha512 truncated to 256bits
class Sha512t:
# BLAKE3 wrapper, 256-bit digest (same width as the old truncated SHA-512)
class Blake3t:
def __init__(self, data):
if data:
self.sha512 = hashlib.sha512(data)
self.blake3 = blake3.blake3(data)
else:
self.sha512 = hashlib.sha512()
self.blake3 = blake3.blake3()
def hexdigest(self):
return self.sha512.hexdigest()[0:64]
return self.blake3.hexdigest()[0:64]
def digest(self):
return self.sha512.digest()[0:32]
return self.blake3.digest()[0:32]
def update(self, data):
return self.sha512.update(data)
return self.blake3.update(data)
def sha512t(data=None):
return Sha512t(data)
def blake3t(data=None):
return Blake3t(data)
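
Usage of the rewritten module stays hashlib-shaped: b3sum for one-shot hashing of a path or file object, blake3t for incremental hashing. A hedged usage sketch (import path assumed from the ZeroNet tree):

from Crypt import CryptHash  # path assumption: src/Crypt/CryptHash.py

# One-shot, as verifyPiece and hashFiles use it:
hex_hash = CryptHash.b3sum("data/example.bin")                   # 64 hex chars
raw_hash = CryptHash.b3sum("data/example.bin", format="digest")  # 32 bytes

# Incremental, as the bigfile piece loop uses it:
h = CryptHash.blake3t()
h.update(b"piece of data")
print(h.hexdigest())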

View File

@ -419,7 +419,7 @@ class SiteStorage(object):
raise Exception("File not allowed: %s" % path)
return inner_path
# Verify all files sha512sum using content.json
# Verify all file b3sums using content.json
def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True):
bad_files = []
back = defaultdict(int)
@ -477,7 +477,7 @@ class SiteStorage(object):
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
hash_id = self.site.content_manager.hashfield.getHashId(file_node["sha512"])
hash_id = self.site.content_manager.hashfield.getHashId(file_node["blake3"])
if not os.path.isfile(file_path):
if self.site.content_manager.isDownloaded(file_inner_path, hash_id):
back["num_optional_removed"] += 1

View File

@ -1059,7 +1059,7 @@ class UiWebsocket(object):
is_text_file = ext in ["json", "txt", "html", "js", "css"]
if is_same_size:
if is_text_file:
is_modified = self.site.content_manager.isModified(inner_path) # Check sha512 hash
is_modified = self.site.content_manager.isModified(inner_path) # Check BLAKE3 hash
else:
is_modified = False
else:

View File

@ -267,7 +267,7 @@ class Actions(object):
logging.info("Verifying site files...")
bad_files += site.storage.verifyFiles()["bad_files"]
if not bad_files:
logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s))
logging.info("[OK] All files b3sum matches! (%.3fs)" % (time.time() - s))
else:
logging.error("[ERROR] Error during verifying site files!")