2015-07-12 20:36:46 +02:00
|
|
|
import json
|
|
|
|
import logging
|
|
|
|
import re
|
|
|
|
import os
|
Rev903, FeedQuery command only available for ADMIN sites, Show bad files in sidebar, Log unknown messages, Add and check inner_path and site address on sign/verify, Better peer cleanup limit, Log site load times, Testcase for address and inner_path verification, Re-sign testsite with new fields, Fix unnecessary loading screen display when browsing sub-folder with index.html, Fix safari notification width
2016-02-18 11:22:21 +01:00
|
|
|
import time
|
2016-09-04 17:51:20 +02:00
|
|
|
import atexit
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2016-04-20 23:28:58 +02:00
|
|
|
import gevent
|
|
|
|
|
2020-01-22 16:35:40 +01:00
|
|
|
import util
|
version 0.2.8, Namecoin domains using internal resolver site, --disable_zeromq option to skip backward compatiblity layer and save some memory, connectionserver firstchar error fixes, missing unpacker crash fix, sitemanager class to allow extensions, add loaded plugin list to websocket api, faster content publishing, mark updating file as bad, remove coppersurfer tracker add eddie4, internal server error with error displaying, allow site domains in UiRequest, better progress bar, wait for siteinfo before before using localstorage, csslater hide only if opacity is 0
2015-03-30 23:44:29 +02:00
|
|
|
from Plugin import PluginManager
|
2016-09-04 17:43:30 +02:00
|
|
|
from Content import ContentDb
|
2015-05-31 15:52:21 +02:00
|
|
|
from Config import config
|
2015-10-18 22:58:02 +02:00
|
|
|
from util import helper
|
2019-08-02 14:06:25 +02:00
|
|
|
from util import RateLimit
|
2019-10-06 03:18:14 +02:00
|
|
|
from util import Cached
|
2015-09-11 02:25:37 +02:00
|
|
|
|
2016-04-20 23:28:58 +02:00
|
|
|
|
version 0.2.8, Namecoin domains using internal resolver site, --disable_zeromq option to skip backward compatiblity layer and save some memory, connectionserver firstchar error fixes, missing unpacker crash fix, sitemanager class to allow extensions, add loaded plugin list to websocket api, faster content publishing, mark updating file as bad, remove coppersurfer tracker add eddie4, internal server error with error displaying, allow site domains in UiRequest, better progress bar, wait for siteinfo before before using localstorage, csslater hide only if opacity is 0
2015-03-30 23:44:29 +02:00
|
|
|
@PluginManager.acceptPlugins
class SiteManager(object):
    """Registry of all known sites: loads/saves data/sites.json and owns the Site objects."""

    def __init__(self):
        self.log = logging.getLogger("SiteManager")
        self.log.debug("SiteManager created.")
        self.sites = {}  # address -> Site object
        self.sites_changed = int(time.time())  # Unix timestamp of last add/delete
        self.loaded = False  # True once sites.json has been read by load()
        gevent.spawn(self.saveTimer)  # Background greenlet: periodic save of sites.json
        atexit.register(lambda: self.save(recalculate_size=True))  # Final save on shutdown
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Load all sites from data/sites.json
@util.Noparallel()
def load(self, cleanup=True, startup=False):
    """Load (or reload) all sites from data/sites.json.

    cleanup: also drop in-memory sites and content.db rows whose address
             is no longer present in sites.json
    startup: queue download of sites listed in sites.json that have no
             local content.json yet
    """
    from Debug import Debug  # NOTE(review): Debug appears unused here -- possibly imported for side effects; TODO confirm
    self.log.info("Loading sites... (cleanup: %s, startup: %s)" % (cleanup, startup))
    self.loaded = False
    from .Site import Site
    address_found = []
    added = 0
    load_s = time.time()
    # Load new addresses
    try:
        json_path = "%s/sites.json" % config.data_dir
        data = json.load(open(json_path))
    except Exception as err:
        raise Exception("Unable to load %s: %s" % (json_path, err))

    sites_need = []  # [address, settings] pairs to start downloading once loading finishes

    for address, settings in data.items():
        if address not in self.sites:
            if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
                # Root content.json exists, try load site
                s = time.time()
                try:
                    site = Site(address, settings=settings)
                    site.content_manager.contents.get("content.json")
                except Exception as err:
                    self.log.debug("Error loading site %s: %s" % (address, err))
                    continue
                self.sites[address] = site
                self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s))
                added += 1
            elif startup:
                # No site directory, start download
                self.log.debug("Found new site in sites.json: %s" % address)
                sites_need.append([address, settings])
                added += 1

        address_found.append(address)

    # Remove deleted addresses
    if cleanup:
        for address in list(self.sites.keys()):
            if address not in address_found:
                del(self.sites[address])
                self.log.debug("Removed site: %s" % address)

        # Remove orphan sites from contentdb
        content_db = ContentDb.getContentDb()
        for row in content_db.execute("SELECT * FROM site").fetchall():
            address = row["address"]
            if address not in self.sites and address not in address_found:
                self.log.info("Deleting orphan site from content.db: %s" % address)

                try:
                    content_db.execute("DELETE FROM site WHERE ?", {"address": address})
                except Exception as err:
                    self.log.error("Can't delete site %s from content_db: %s" % (address, err))

                if address in content_db.site_ids:
                    del content_db.site_ids[address]
                if address in content_db.sites:
                    del content_db.sites[address]

    self.loaded = True
    for address, settings in sites_need:
        # Downloads are spawned only after self.loaded is set, so need()
        # does not re-trigger load()
        gevent.spawn(self.need, address, settings=settings)
    if added:
        self.log.info("Added %s sites in %.3fs" % (added, time.time() - load_s))
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2019-08-02 14:06:25 +02:00
|
|
|
def saveDelayed(self):
    """Request an async save of sites.json, rate-limited to once per 5 seconds."""
    RateLimit.callAsync("Save sites.json", func=self.save, allowed_again=5)
|
|
|
|
|
2017-08-09 14:21:44 +02:00
|
|
|
def save(self, recalculate_size=False):
    """Write every site's settings to data/sites.json (atomic write).

    recalculate_size: if True, refresh each site's "size" / "size_optional"
    settings from its content manager before saving.

    Skips (with a debug log) when no sites are known or sites.json has not
    been loaded yet -- saving before a load would wipe the file.
    """
    if not self.sites:
        self.log.debug("Save skipped: No sites found")
        return
    if not self.loaded:
        self.log.debug("Save skipped: Not loaded")
        return
    # Fix: use a dedicated start marker for the total; the final log used to
    # reuse `s` (reset before the write phase) and so reported only the
    # write duration as the total save time.
    s_total = time.time()

    # Generate data file
    s = time.time()
    data = {}
    for address, site in list(self.list().items()):
        if recalculate_size:
            site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize()  # Update site size
        data[address] = site.settings
        data[address]["cache"] = site.getSettingsCache()
    time_generate = time.time() - s

    s = time.time()
    if data:
        helper.atomicWrite("%s/sites.json" % config.data_dir, helper.jsonDumps(data).encode("utf8"))
    else:
        self.log.debug("Save error: No data")
    time_write = time.time() - s

    # Remove cache from site settings
    for address, site in self.list().items():
        site.settings["cache"] = {}

    self.log.debug(
        "Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" %
        (time.time() - s_total, time_generate, time_write)
    )
|
2016-04-20 23:28:58 +02:00
|
|
|
|
|
|
|
def saveTimer(self):
    """Background loop: persist sites.json every 10 minutes, recalculating sizes."""
    while True:
        time.sleep(60 * 10)
        self.save(recalculate_size=True)
|
2016-04-20 23:28:58 +02:00
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Checks if it's a valid address
def isAddress(self, address):
    """Return a regex match object if *address* looks like a site address
    (26-35 alphanumeric characters), otherwise None."""
    address_pattern = "^[A-Za-z0-9]{26,35}$"
    return re.match(address_pattern, address)
|
|
|
|
|
2017-05-11 18:02:05 +02:00
|
|
|
def isDomain(self, address):
    """Plugin hook: the base implementation recognizes no domains."""
    return False
|
|
|
|
|
2019-10-06 03:18:14 +02:00
|
|
|
@Cached(timeout=10)
def isDomainCached(self, address):
    """Cached (10s) wrapper around isDomain()."""
    is_domain = self.isDomain(address)
    return is_domain
|
|
|
|
|
2019-08-15 03:05:29 +02:00
|
|
|
def resolveDomain(self, domain):
    """Plugin hook: the base implementation cannot resolve any domain."""
    return False
|
|
|
|
|
2019-10-06 03:18:14 +02:00
|
|
|
@Cached(timeout=10)
def resolveDomainCached(self, domain):
    """Cached (10s) wrapper around resolveDomain()."""
    resolved = self.resolveDomain(domain)
    return resolved
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Return: Site object or None if not found
def get(self, address):
    """Return the Site for *address* (domains resolved first), or None if unknown.

    Triggers a full load() if sites.json has not been loaded yet.
    """
    if self.isDomainCached(address):
        address_resolved = self.resolveDomainCached(address)
        if address_resolved:
            address = address_resolved

    if not self.loaded:  # Not loaded yet
        # Fix: message previously contained a stray ")" ("Loading site: %s)...")
        self.log.debug("Loading site: %s..." % address)
        self.load()
    site = self.sites.get(address)

    return site
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2020-02-21 13:58:11 +01:00
|
|
|
def add(self, address, all_file=True, settings=None, **kwargs):
    """Register a new site and start downloading its files.

    address: site address string
    all_file: if True, also download user files on the first sync
    settings: optional stored settings dict passed to the new Site
    **kwargs: unused here; presumably consumed by plugin overrides -- TODO confirm

    Returns the new Site, an existing Site on a case-insensitive address
    match, or False when the address is invalid.
    """
    from .Site import Site
    self.sites_changed = int(time.time())
    # Try to find site with different case
    for recover_address, recover_site in list(self.sites.items()):
        if recover_address.lower() == address.lower():
            return recover_site

    if not self.isAddress(address):
        return False  # Not address: %s % address
    self.log.debug("Added new site: %s" % address)
    config.loadTrackersFile()
    site = Site(address, settings=settings)
    self.sites[address] = site
    if not site.settings["serving"]:  # Maybe it was deleted before
        site.settings["serving"] = True
    site.saveSettings()
    if all_file:  # Also download user files on first sync
        site.download(check_size=True, blind_includes=True)
    return site
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Return or create site and start download site files
def need(self, address, *args, **kwargs):
    """Return the Site for *address*, creating it (and starting its
    download) via add() when it does not exist yet. Extra arguments are
    forwarded to add()."""
    if self.isDomainCached(address):
        resolved = self.resolveDomainCached(address)
        if resolved:
            address = resolved

    site = self.get(address)
    if site:
        return site
    # Site does not exist yet: register it and start downloading
    return self.add(address, *args, **kwargs)
|
|
|
|
|
|
|
|
def delete(self, address):
    """Forget *address*: drop it from memory and persist the removal to sites.json."""
    self.sites_changed = int(time.time())
    self.log.debug("Deleted site: %s" % address)
    del self.sites[address]
    self.save()  # Delete from sites.json
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Lazy load sites
def list(self):
    """Return the address -> Site dict, loading sites.json on first access."""
    if self.loaded:
        return self.sites
    # First access: trigger a lazy load from sites.json
    self.log.debug("Sites not loaded yet...")
    self.load(startup=True)
    return self.sites
|
|
|
|
|
|
|
|
|
|
|
|
site_manager = SiteManager()  # Singleton instance shared by the whole process

if config.action == "main":
    # Don't connect / add myself to peerlist
    peer_blacklist = [
        ("127.0.0.1", config.fileserver_port),
        ("::1", config.fileserver_port),
    ]
else:
    peer_blacklist = []
|
|
|
|
|