import os
import sys
import json
import re
import shutil


def update():
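    # Apply a downloaded update: copy the update site's files over the local
    # source and runtime directories, preserving each plugin's enabled/disabled state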
    from Config import config
    config.parse(silent=True)
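
    # Destination of "core/" files: an explicit source_update_dir if set, otherwise the current working directory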
    if getattr(sys, 'source_update_dir', False):
        if not os.path.isdir(sys.source_update_dir):
            os.makedirs(sys.source_update_dir)
        source_path = sys.source_update_dir.rstrip("/")
    else:
        source_path = os.getcwd().rstrip("/")
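
    # Destination of bundle files: the runtime directory (the Linux bundle nests the executable two levels deeper)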
    if config.dist_type.startswith("bundle_linux"):
        runtime_path = os.path.normpath(os.path.dirname(sys.executable) + "/../..")
    else:
        runtime_path = os.path.dirname(sys.executable)
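
    # The update site is stored like any other site, under the data directory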
    updatesite_path = config.data_dir + "/" + config.updatesite
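
    # The bad_files cache lists files the client still considers missing or failing verification; report the count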
    sites_json = json.load(open(config.data_dir + "/sites.json"))
    updatesite_bad_files = sites_json.get(config.updatesite, {}).get("cache", {}).get("bad_files", {})
    print(
        "Update site path: %s, bad_files: %s, source path: %s, runtime path: %s, dist type: %s" %
        (updatesite_path, len(updatesite_bad_files), source_path, runtime_path, config.dist_type)
    )
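
    # Collect the normal and optional file lists from the update site's content.json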
    updatesite_content_json = json.load(open(updatesite_path + "/content.json"))
    inner_paths = list(updatesite_content_json.get("files", {}).keys())
    inner_paths += list(updatesite_content_json.get("files_optional", {}).keys())

    # Keep only the files under the core/ and bundle directories
    inner_paths = [inner_path for inner_path in inner_paths if re.match("^(core|bundle)", inner_path)]

    # Record which plugins are currently enabled or disabled
    plugins_enabled = []
    plugins_disabled = []
    if os.path.isdir("%s/plugins" % source_path):
        for dir_name in os.listdir("%s/plugins" % source_path):
            if dir_name.startswith("disabled-"):
                plugins_disabled.append(dir_name.replace("disabled-", ""))
            else:
                plugins_enabled.append(dir_name)
    print("Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled)
    update_paths = {}

    for inner_path in inner_paths:
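        # Defend against directory traversal in file names coming from content.json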
        if ".." in inner_path:
            continue
        inner_path = inner_path.replace("\\", "/").strip("/")  # Make sure we have a unix path
        print(".", end=" ")
        if inner_path.startswith("core"):
            dest_path = source_path + "/" + re.sub("^core/", "", inner_path)
        elif inner_path.startswith(config.dist_type):
            dest_path = runtime_path + "/" + re.sub("^bundle[^/]+/", "", inner_path)
        else:
            continue

        if not dest_path:
            continue

        # Keep plugin disabled/enabled status
        match = re.match(re.escape(source_path) + "/plugins/([^/]+)", dest_path)
        if match:
            plugin_name = match.group(1).replace("disabled-", "")
            if plugin_name in plugins_enabled:  # Plugin was enabled
                dest_path = dest_path.replace("plugins/disabled-" + plugin_name, "plugins/" + plugin_name)
            elif plugin_name in plugins_disabled:  # Plugin was disabled
                dest_path = dest_path.replace("plugins/" + plugin_name, "plugins/disabled-" + plugin_name)
            print("P", end=" ")

        dest_dir = os.path.dirname(dest_path)
        if dest_dir and not os.path.isdir(dest_dir):
            os.makedirs(dest_dir)
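
        # Skip directory entries: for those, dirname equals the path itself once trailing slashes are stripped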
        if dest_dir != dest_path.strip("/"):
            update_paths[updatesite_path + "/" + inner_path] = dest_path
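
    # Copy the collected files, counting successes, rename workarounds and failures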
    num_ok = 0
    num_rename = 0
    num_error = 0
    for path_from, path_to in update_paths.items():
        print("-", path_from, "->", path_to)
        if not os.path.isfile(path_from):
            print("Missing file")
            continue

        data = open(path_from, "rb").read()

        try:
            open(path_to, 'wb').write(data)
            num_ok += 1
        except Exception as err:
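            # The destination may be locked for in-place writing (e.g. the running executable);
            # renaming usually still works, so move the old file aside and retry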
            try:
                print("Error writing: %s. Renaming old file as workaround..." % err)
                path_to_tmp = path_to + "-old"
                if os.path.isfile(path_to_tmp):
                    os.unlink(path_to_tmp)
                os.rename(path_to, path_to_tmp)
                num_rename += 1
                open(path_to, 'wb').write(data)
                shutil.copymode(path_to_tmp, path_to)  # Copy permissions
                print("Write done after rename!")
                num_ok += 1
            except Exception as err:
                print("Write error after rename: %s" % err)
                num_error += 1

    print("* Updated files: %s, renamed: %s, error: %s" % (num_ok, num_rename, num_error))


if __name__ == "__main__":
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))  # Imports relative to src
    update()