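# ZeroNet self-updater script (Python 2).
# Fetches the latest source either from the local update site (config.updatesite)
# when it is intact, or from one of the public mirrors listed in download(),
# then overwrites the current installation in place while preserving the
# enabled/disabled state of plugins. Run it directly; the __main__ block at
# the bottom sets up the import path and configuration first.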
import urllib
import zipfile
import os
import ssl
import httplib
import socket
import re
import json
import cStringIO as StringIO

from gevent import monkey
monkey.patch_all()


def download():
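    """Download the latest ZeroNet source as a zip archive.

    Tries each mirror URL in order and returns the first archive that opens
    cleanly as a zipfile.ZipFile; if every mirror fails, the last error is
    re-raised.
    """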
    from src.util import helper

    urls = [
        "https://github.com/HelloZeroNet/ZeroNet/archive/master.zip",
        "https://gitlab.com/HelloZeroNet/ZeroNet/repository/archive.zip?ref=master",
        "https://try.gogs.io/ZeroNet/ZeroNet/archive/master.zip"
    ]
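
    # Try the mirrors in order; keep the first response that unpacks as a valid zip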
    zipdata = None
    for url in urls:
        print "Downloading from:", url,
        try:
            req = helper.httpRequest(url)
            data = StringIO.StringIO()
            while True:
                buff = req.read(1024 * 16)
                if not buff:
                    break
                data.write(buff)
                print ".",
            try:
                zipdata = zipfile.ZipFile(data)
                break  # Success
            except Exception, err:
                data.seek(0)
                print "Unpack error", err, data.read(256)
        except Exception, err:
            print "Error downloading update from %s: %s" % (url, err)

    if not zipdata:
        raise err  # All mirrors failed; re-raise the last error

    print "Downloaded."
    return zipdata


def update():
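    """Update the installation in place.

    Prefers the files served by the local update site (config.updatesite)
    when it exists, is being served and has no broken files; otherwise falls
    back to download(). Plugin directories keep their current
    enabled/disabled ("disabled-" prefix) state while extracting.
    """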
    from Config import config
    updatesite_path = config.data_dir + "/" + config.updatesite
    sites_json = json.load(open(config.data_dir + "/sites.json"))
    updatesite_bad_files = sites_json.get(config.updatesite, {}).get("cache", {}).get("bad_files", {})
    print "Update site path: %s, bad_files: %s" % (updatesite_path, len(updatesite_bad_files))
    if os.path.isfile(updatesite_path + "/content.json") and len(updatesite_bad_files) == 0 and sites_json.get(config.updatesite, {}).get("serving"):
        # Update site exists and has no broken files
        print "Updating using site %s" % config.updatesite
        zipdata = False
        inner_paths = json.load(open(updatesite_path + "/content.json"))["files"].keys()
        # Keep only files under the ZeroNet/ directory
        inner_paths = [inner_path for inner_path in inner_paths if inner_path.startswith("ZeroNet/")]
    else:
        # Fallback to download
        zipdata = download()
        inner_paths = zipdata.namelist()

    # Check the current enabled/disabled status of plugins
    plugins_enabled = []
    plugins_disabled = []
    if os.path.isdir("plugins"):
        for dir in os.listdir("plugins"):
            if dir.startswith("disabled-"):
                plugins_disabled.append(dir.replace("disabled-", ""))
            else:
                plugins_enabled.append(dir)
    print "Plugins enabled:", plugins_enabled, "disabled:", plugins_disabled
    print "Extracting...",
    for inner_path in inner_paths:
        if ".." in inner_path:  # Path traversal protection: never extract entries containing ".."
            continue
        inner_path = inner_path.replace("\\", "/")  # Make sure we have unix path
        print ".",
        dest_path = re.sub("^([^/]*-master.*?|ZeroNet)/", "", inner_path)  # Skip root zeronet-master-... like directories
        dest_path = dest_path.lstrip("/")
        if not dest_path:
            continue

        # Keep plugin disabled/enabled status
        match = re.match("plugins/([^/]+)", dest_path)
        if match:
            plugin_name = match.group(1).replace("disabled-", "")
            if plugin_name in plugins_enabled:  # Plugin was enabled
                dest_path = dest_path.replace("plugins/disabled-" + plugin_name, "plugins/" + plugin_name)
            elif plugin_name in plugins_disabled:  # Plugin was disabled
                dest_path = dest_path.replace("plugins/" + plugin_name, "plugins/disabled-" + plugin_name)
            print "P",

        dest_dir = os.path.dirname(dest_path)

        if dest_dir and not os.path.isdir(dest_dir):
            os.makedirs(dest_dir)

        if dest_dir != dest_path.strip("/"):  # It's a file entry, not a bare directory
            if zipdata:
                data = zipdata.read(inner_path)
            else:
                data = open(updatesite_path + "/" + inner_path, "rb").read()

            try:
                open(dest_path, 'wb').write(data)
            except Exception, err:
                print dest_path, err

    print "Done."
    return True


if __name__ == "__main__":
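    # Standalone entry point: set up the import path and config, apply the
    # gevent SSL workaround, then run the updater.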
    import sys
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))  # Imports relative to src
    from Config import config
    config.parse()
    # Fix broken gevent SSL
    from src.util import SslPatch  # The import itself applies the SSL patch

    try:
        update()
    except Exception, err:
        print "Update error: %s" % err
    raw_input("Press enter to exit")