diff --git a/plugins/FilePack/FilePackPlugin.py b/plugins/FilePack/FilePackPlugin.py
new file mode 100644
index 00000000..a095c6d4
--- /dev/null
+++ b/plugins/FilePack/FilePackPlugin.py
@@ -0,0 +1,193 @@
+import os
+import re
+
+import gevent
+
+from Plugin import PluginManager
+from Config import config
+from Debug import Debug
+
# Keep archive open for faster response times for large sites
+archive_cache = {}
+
+
def closeArchive(archive_path):
    """Evict a cached archive handle (no-op if the path is not cached)."""
    archive_cache.pop(archive_path, None)
+
+
def openArchive(archive_path, file_obj=None):
    """Return an opened tarfile/zipfile object for archive_path, using the module cache.

    On a cache miss the archive is opened (from file_obj if given, otherwise
    from disk) and an eviction is scheduled 5 seconds later.  Note the timer
    is only armed on first open and is NOT refreshed by later cache hits, so
    the entry lives ~5s from first access.  Eviction only drops the cache
    reference; the archive object itself is not explicitly closed.
    """
    if archive_path not in archive_cache:
        if archive_path.endswith("tar.gz"):
            import tarfile
            archive_cache[archive_path] = tarfile.open(archive_path, fileobj=file_obj, mode="r:gz")
        else:
            # Anything that is not tar.gz is treated as a zip archive
            import zipfile
            archive_cache[archive_path] = zipfile.ZipFile(file_obj or archive_path)
        gevent.spawn_later(5, lambda: closeArchive(archive_path))  # Close after 5 sec

    archive = archive_cache[archive_path]
    return archive
+
+
def openArchiveFile(archive_path, path_within, file_obj=None):
    """Return a readable file object for one member inside an archive."""
    archive = openArchive(archive_path, file_obj=file_obj)
    # ZipFile exposes .open(), TarFile exposes .extractfile()
    opener = archive.open if archive_path.endswith(".zip") else archive.extractfile
    return opener(path_within)
+
+
@PluginManager.registerTo("UiRequest")
class UiRequestPlugin(object):
    """Serve files located inside .zip / .tar.gz archives as if they were plain site files."""

    def actionSiteMedia(self, path, **kwargs):
        """Intercept media requests whose path points inside an archive.

        Non-archive paths fall through to the original implementation.
        Returns a streaming generator on success, an error page otherwise.
        """
        if ".zip/" in path or ".tar.gz/" in path:
            file_obj = None
            path_parts = self.parsePath(path)
            file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"])
            match = re.match(r"^(.*\.(?:tar.gz|zip))/(.*)", file_path)
            archive_path, path_within = match.groups()
            if archive_path not in archive_cache:
                site = self.server.site_manager.get(path_parts["address"])
                if not site:
                    return self.actionSiteAddPrompt(path)
                archive_inner_path = site.storage.getInnerPath(archive_path)
                if not os.path.isfile(archive_path):
                    # Wait until the archive itself finishes downloading
                    result = site.needFile(archive_inner_path, priority=10)
                    # Send virtual file path download finished event to remove loading screen
                    site.updateWebsocket(file_done=archive_inner_path)
                    if not result:
                        return self.error404(archive_inner_path)
                # openBigfile returns False when the file is not a bigfile; fall back to disk path
                file_obj = site.storage.openBigfile(archive_inner_path)
                if file_obj is False:
                    file_obj = None

            # Allow cross-site ajax only with a valid per-site ajax_key
            header_allow_ajax = False
            if self.get.get("ajax_key"):
                requester_site = self.server.site_manager.get(path_parts["request_address"])
                if self.get["ajax_key"] == requester_site.settings["ajax_key"]:
                    header_allow_ajax = True
                else:
                    return self.error403("Invalid ajax_key")

            try:
                file_like = openArchiveFile(archive_path, path_within, file_obj=file_obj)
                content_type = self.getContentType(file_path)
                self.sendHeader(200, content_type=content_type, noscript=kwargs.get("header_noscript", False), allow_ajax=header_allow_ajax)
                return self.streamFile(file_like)
            except Exception as err:
                self.log.debug("Error opening archive file: %s" % Debug.formatException(err))
                return self.error404(path)

        return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs)

    def streamFile(self, file):
        """Yield the file's content in 60KB blocks, at most 100 blocks (~6MB).

        The file is always closed, including when the 100-block cap is hit
        (the original leaked the handle in that case) and on generator
        teardown.  Avoids `raise StopIteration` inside a generator, which is
        fragile under PEP 479.
        """
        try:
            for _ in range(100):  # Read max 6MB
                block = file.read(60 * 1024)
                if not block:
                    break
                yield block
        finally:
            file.close()
+
+
@PluginManager.registerTo("SiteStorage")
class SiteStoragePlugin(object):
    """Make .zip / .tar.gz archives transparently browsable through the storage API."""

    def isFile(self, inner_path):
        """A path inside an archive is considered present if the archive file itself is."""
        if ".zip/" in inner_path or ".tar.gz/" in inner_path:
            match = re.match(r"^(.*\.(?:tar.gz|zip))/(.*)", inner_path)
            archive_inner_path, path_within = match.groups()
            return super(SiteStoragePlugin, self).isFile(archive_inner_path)
        else:
            return super(SiteStoragePlugin, self).isFile(inner_path)

    def openArchive(self, inner_path):
        """Open (downloading first if necessary) the archive at inner_path via the shared cache.

        Raises Exception when the archive cannot be downloaded or opened.
        """
        archive_path = self.getPath(inner_path)
        file_obj = None
        if archive_path not in archive_cache:
            if not os.path.isfile(archive_path):
                # Not on disk yet: block until the archive is downloaded
                result = self.site.needFile(inner_path, priority=10)
                self.site.updateWebsocket(file_done=inner_path)  # Clears the loading screen
                if not result:
                    raise Exception("Unable to download file")
            # openBigfile returns False for non-bigfiles; fall back to the disk path
            file_obj = self.site.storage.openBigfile(inner_path)
            if file_obj is False:
                file_obj = None

        try:
            archive = openArchive(archive_path, file_obj=file_obj)
        except Exception as err:
            raise Exception("Unable to download file: %s" % Debug.formatException(err))

        return archive

    def walk(self, inner_path, *args, **kwargs):
        """List all (non-directory) member paths under inner_path, recursing into archives."""
        if ".zip" in inner_path or ".tar.gz" in inner_path:
            match = re.match(r"^(.*\.(?:tar.gz|zip))(.*)", inner_path)
            archive_inner_path, path_within = match.groups()
            archive = self.openArchive(archive_inner_path)
            path_within = path_within.lstrip("/")

            if archive_inner_path.endswith(".zip"):
                # Zip directory entries end with "/"
                namelist = [name for name in archive.namelist() if not name.endswith("/")]
            else:
                namelist = [item.name for item in archive.getmembers() if not item.isdir()]

            namelist_relative = []
            for name in namelist:
                if not name.startswith(path_within):
                    continue
                name_relative = name.replace(path_within, "", 1).rstrip("/")
                namelist_relative.append(name_relative)

            return namelist_relative

        else:
            return super(SiteStoragePlugin, self).walk(inner_path, *args, **kwargs)

    def list(self, inner_path, *args, **kwargs):
        """List direct children of inner_path (files and directories), recursing into archives."""
        if ".zip" in inner_path or ".tar.gz" in inner_path:
            match = re.match(r"^(.*\.(?:tar.gz|zip))(.*)", inner_path)
            archive_inner_path, path_within = match.groups()
            archive = self.openArchive(archive_inner_path)
            path_within = path_within.lstrip("/")

            if archive_inner_path.endswith(".zip"):
                namelist = [name for name in archive.namelist()]
            else:
                namelist = [item.name for item in archive.getmembers()]

            namelist_relative = []
            for name in namelist:
                if not name.startswith(path_within):
                    continue
                name_relative = name.replace(path_within, "", 1).rstrip("/")

                if "/" in name_relative:  # File is in sub-directory
                    continue

                namelist_relative.append(name_relative)
            return namelist_relative

        else:
            return super(SiteStoragePlugin, self).list(inner_path, *args, **kwargs)

    def read(self, inner_path, mode="rb", **kwargs):
        """Read a whole file; paths inside archives are extracted in memory.

        NOTE(review): for archive members the `mode` argument is ignored and
        bytes are always returned — confirm callers expect that.
        """
        if ".zip/" in inner_path or ".tar.gz/" in inner_path:
            match = re.match(r"^(.*\.(?:tar.gz|zip))(.*)", inner_path)
            archive_inner_path, path_within = match.groups()
            archive = self.openArchive(archive_inner_path)
            path_within = path_within.lstrip("/")

            if archive_inner_path.endswith(".zip"):
                return archive.open(path_within).read()
            else:
                return archive.extractfile(path_within).read()

        else:
            return super(SiteStoragePlugin, self).read(inner_path, mode, **kwargs)
+
diff --git a/plugins/FilePack/__init__.py b/plugins/FilePack/__init__.py
new file mode 100644
index 00000000..660a0920
--- /dev/null
+++ b/plugins/FilePack/__init__.py
@@ -0,0 +1 @@
+from . import FilePackPlugin
\ No newline at end of file
diff --git a/plugins/FilePack/plugin_info.json b/plugins/FilePack/plugin_info.json
new file mode 100644
index 00000000..42112f95
--- /dev/null
+++ b/plugins/FilePack/plugin_info.json
@@ -0,0 +1,5 @@
+{
+ "name": "FilePack",
+ "description": "Transparent web access for Zip and Tar.gz files.",
+ "default": "enabled"
+}
\ No newline at end of file
diff --git a/plugins/Newsfeed/NewsfeedPlugin.py b/plugins/Newsfeed/NewsfeedPlugin.py
new file mode 100644
index 00000000..3eb14d6c
--- /dev/null
+++ b/plugins/Newsfeed/NewsfeedPlugin.py
@@ -0,0 +1,187 @@
+import time
+import re
+
+from Plugin import PluginManager
+from Db.DbQuery import DbQuery
+from Debug import Debug
+from util import helper
+from util.Flag import flag
+
+
@PluginManager.registerTo("UiWebsocket")
class UiWebsocketPlugin(object):
    """Websocket API for the Newsfeed plugin: follow site feeds, query and search them."""

    def formatSiteInfo(self, site, create_user=True):
        # Extend the site info with the number of followed feeds (None = not following)
        site_info = super(UiWebsocketPlugin, self).formatSiteInfo(site, create_user=create_user)
        feed_following = self.user.sites.get(site.address, {}).get("follow", None)
        if feed_following is None:
            site_info["feed_follow_num"] = None
        else:
            site_info["feed_follow_num"] = len(feed_following)
        return site_info

    def actionFeedFollow(self, to, feeds):
        """Persist the feed queries the user follows for the current site."""
        self.user.setFeedFollow(self.site.address, feeds)
        # NOTE(review): setFeedFollow already calls save(); this second save looks redundant — confirm
        self.user.save()
        self.response(to, "ok")

    def actionFeedListFollow(self, to):
        """Return the feed queries currently followed for the current site."""
        feeds = self.user.sites.get(self.site.address, {}).get("follow", {})
        self.response(to, feeds)

    @flag.admin
    def actionFeedQuery(self, to, limit=10, day_limit=3):
        """Run every followed feed query and respond with the newest rows.

        limit: max rows per feed; day_limit: only items newer than N days
        (injected into every UNION-ed sub-query's WHERE clause).
        """
        from Site import SiteManager
        rows = []
        stats = []

        total_s = time.time()
        num_sites = 0

        for address, site_data in list(self.user.sites.items()):
            feeds = site_data.get("follow")
            if not feeds:
                continue
            if type(feeds) is not dict:
                self.log.debug("Invalid feed for site %s" % address)
                continue
            num_sites += 1
            for name, query_set in feeds.items():
                site = SiteManager.site_manager.get(address)
                if not site or not site.storage.has_db:
                    continue

                s = time.time()
                try:
                    query_raw, params = query_set
                    # Inject the day_limit condition into each UNION-ed sub-query
                    query_parts = re.split(r"UNION(?:\s+ALL|)", query_raw)
                    for i, query_part in enumerate(query_parts):
                        db_query = DbQuery(query_part)
                        if day_limit:
                            where = " WHERE %s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit)
                            if "WHERE" in query_part:
                                query_part = re.sub("WHERE (.*?)(?=$| GROUP BY)", where + " AND (\\1)", query_part)
                            else:
                                query_part += where
                            query_parts[i] = query_part
                    query = " UNION ".join(query_parts)

                    if ":params" in query:
                        query_params = map(helper.sqlquote, params)
                        query = query.replace(":params", ",".join(query_params))

                    res = site.storage.query(query + " ORDER BY date_added DESC LIMIT %s" % limit)

                except Exception as err:  # Log error
                    self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
                    stats.append({"site": site.address, "feed_name": name, "error": str(err)})
                    continue

                for row in res:
                    row = dict(row)
                    # Validate date_added BEFORE using it; the presence check previously
                    # ran after the first access and could never prevent a KeyError.
                    if "date_added" not in row or not isinstance(row["date_added"], (int, float, complex)):
                        self.log.debug("Invalid date_added from site %s: %r" % (address, row.get("date_added")))
                        continue
                    if row["date_added"] > 1000000000000:  # Formatted as milliseconds
                        row["date_added"] = row["date_added"] / 1000
                    if row["date_added"] > time.time() + 120:
                        self.log.debug("Newsfeed item from the future from site %s" % address)
                        continue  # Feed item is in the future, skip it
                    row["site"] = address
                    row["feed_name"] = name
                    rows.append(row)
                stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)})
                time.sleep(0.001)  # Yield to other greenlets between feeds
        return self.response(to, {"rows": rows, "stats": stats, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3)})

    def parseSearch(self, search):
        """Split 'text site:X type:Y' into [search_text, {filter_name: value}]."""
        parts = re.split("(site|type):", search)
        if len(parts) > 1:  # Found filter
            search_text = parts[0]
            parts = [part.strip() for part in parts]
            filters = dict(zip(parts[1::2], parts[2::2]))
        else:
            search_text = search
            filters = {}
        return [search_text, filters]

    def actionFeedSearch(self, to, search, limit=30, day_limit=30):
        """Full-text search across the feeds of every known site (ADMIN permission required)."""
        if "ADMIN" not in self.site.settings["permissions"]:
            return self.response(to, "FeedSearch not allowed")

        from Site import SiteManager
        rows = []
        stats = []
        num_sites = 0
        total_s = time.time()

        search_text, filters = self.parseSearch(search)

        for address, site in SiteManager.site_manager.list().items():
            if not site.storage.has_db:
                continue

            if "site" in filters:
                # Match by address or lowercased title; a missing content.json or
                # missing title no longer raises and simply skips the site.
                site_title = site.content_manager.contents.get("content.json", {}).get("title", "")
                if filters["site"].lower() not in [site.address, site_title.lower()]:
                    continue

            if site.storage.db:  # Database loaded
                feeds = site.storage.db.schema.get("feeds")
            else:
                try:
                    feeds = site.storage.loadJson("dbschema.json").get("feeds")
                except Exception:
                    continue

            if not feeds:
                continue

            num_sites += 1

            for name, query in feeds.items():
                s = time.time()
                try:
                    db_query = DbQuery(query)

                    params = []
                    # Filters
                    if search_text:
                        db_query.wheres.append("(%s LIKE ? OR %s LIKE ?)" % (db_query.fields["body"], db_query.fields["title"]))
                        search_like = "%" + search_text.replace(" ", "%") + "%"
                        params.append(search_like)
                        params.append(search_like)
                    if filters.get("type") and filters["type"] not in query:
                        continue

                    if day_limit:
                        db_query.wheres.append(
                            "%s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit)
                        )

                    # Order
                    db_query.parts["ORDER BY"] = "date_added DESC"
                    db_query.parts["LIMIT"] = str(limit)

                    res = site.storage.query(str(db_query), params)
                except Exception as err:
                    self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err)))
                    stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query})
                    continue
                for row in res:
                    row = dict(row)
                    # .get avoids a KeyError when the feed row has no date_added column
                    if not row.get("date_added") or row["date_added"] > time.time() + 120:
                        continue  # Missing timestamp or feed item from the future: skip it
                    row["site"] = address
                    row["feed_name"] = name
                    rows.append(row)
                stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)})
        return self.response(to, {"rows": rows, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3), "stats": stats})
+
+
@PluginManager.registerTo("User")
class UserPlugin(object):
    def setFeedFollow(self, address, feeds):
        """Store the feed queries the user follows for a site and persist the user data."""
        data = self.getSiteData(address)
        data["follow"] = feeds
        self.save()
        return data
diff --git a/plugins/Newsfeed/__init__.py b/plugins/Newsfeed/__init__.py
new file mode 100644
index 00000000..6e624df6
--- /dev/null
+++ b/plugins/Newsfeed/__init__.py
@@ -0,0 +1 @@
+from . import NewsfeedPlugin
\ No newline at end of file
diff --git a/plugins/OptionalManager/ContentDbPlugin.py b/plugins/OptionalManager/ContentDbPlugin.py
new file mode 100644
index 00000000..f0f8a877
--- /dev/null
+++ b/plugins/OptionalManager/ContentDbPlugin.py
@@ -0,0 +1,414 @@
+import time
+import collections
+import itertools
+import re
+
+import gevent
+
+from util import helper
+from Plugin import PluginManager
+from Config import config
+from Debug import Debug
+
# NOTE: at module level locals() is globals(), so this guard keeps the
# existing content_db reference across module reloads.
if "content_db" not in locals().keys():  # To keep between module reloads
    content_db = None  # Set to the ContentDbPlugin instance by its __init__
+
+
@PluginManager.registerTo("ContentDb")
class ContentDbPlugin(object):
    """Track optional files in a dedicated file_optional table.

    Maintains per-file download/pin/peer/upload stats and enforces the
    configured optional-file disk limit by deleting the least valuable
    downloaded files.
    """

    def __init__(self, *args, **kwargs):
        global content_db
        content_db = self  # Publish the instance so other plugin modules can reach the db
        self.filled = {}  # Site addresses that already filled from content.json
        self.need_filling = False  # file_optional table just created, fill data from content.json files
        self.time_peer_numbers_updated = 0  # Timestamp of last updatePeerNumbers() run
        self.my_optional_files = {}  # Last 50 site_address/inner_path called by fileWrite (auto-pinning these files)
        # {site_id: {last-8-chars-of-inner_path: 1}} — cheap membership fingerprint, see isOptionalFile
        self.optional_files = collections.defaultdict(dict)
        self.optional_files_loaded = False
        self.timer_check_optional = helper.timer(60 * 5, self.checkOptionalLimit)
        super(ContentDbPlugin, self).__init__(*args, **kwargs)

    def getSchema(self):
        """Extend the base schema with the file_optional table."""
        schema = super(ContentDbPlugin, self).getSchema()

        # Need file_optional table
        schema["tables"]["file_optional"] = {
            "cols": [
                ["file_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"],
                ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
                ["inner_path", "TEXT"],
                ["hash_id", "INTEGER"],
                ["size", "INTEGER"],
                ["peer", "INTEGER DEFAULT 0"],
                ["uploaded", "INTEGER DEFAULT 0"],
                ["is_downloaded", "INTEGER DEFAULT 0"],
                ["is_pinned", "INTEGER DEFAULT 0"],
                ["time_added", "INTEGER DEFAULT 0"],
                ["time_downloaded", "INTEGER DEFAULT 0"],
                ["time_accessed", "INTEGER DEFAULT 0"]
            ],
            "indexes": [
                "CREATE UNIQUE INDEX file_optional_key ON file_optional (site_id, inner_path)",
                "CREATE INDEX is_downloaded ON file_optional (is_downloaded)"
            ],
            "schema_changed": 11
        }

        return schema

    def initSite(self, site):
        # Backfill the freshly created table from the site's content.json files
        super(ContentDbPlugin, self).initSite(site)
        if self.need_filling:
            self.fillTableFileOptional(site)

    def checkTables(self):
        # If file_optional was (re)created, remember to backfill it per site
        changed_tables = super(ContentDbPlugin, self).checkTables()
        if "file_optional" in changed_tables:
            self.need_filling = True
        return changed_tables

    # Load fingerprints of known optional files and refresh per-site size stats
    def loadFilesOptional(self):
        s = time.time()
        num = 0
        total = 0
        total_downloaded = 0
        res = content_db.execute("SELECT site_id, inner_path, size, is_downloaded FROM file_optional")
        site_sizes = collections.defaultdict(lambda: collections.defaultdict(int))
        for row in res:
            self.optional_files[row["site_id"]][row["inner_path"][-8:]] = 1
            num += 1

            # Update site size stats
            site_sizes[row["site_id"]]["size_optional"] += row["size"]
            if row["is_downloaded"]:
                site_sizes[row["site_id"]]["optional_downloaded"] += row["size"]

        # Write per-site size stats into sites.json settings
        site_ids_reverse = {val: key for key, val in self.site_ids.items()}
        for site_id, stats in site_sizes.items():
            site_address = site_ids_reverse.get(site_id)
            if not site_address or site_address not in self.sites:
                self.log.error("Not found site_id: %s" % site_id)
                continue
            site = self.sites[site_address]
            site.settings["size_optional"] = stats["size_optional"]
            site.settings["optional_downloaded"] = stats["optional_downloaded"]
            total += stats["size_optional"]
            total_downloaded += stats["optional_downloaded"]

        self.log.info(
            "Loaded %s optional files: %.2fMB, downloaded: %.2fMB in %.3fs" %
            (num, float(total) / 1024 / 1024, float(total_downloaded) / 1024 / 1024, time.time() - s)
        )

        # First run after the table was created: if the configured limit is already
        # smaller than what is on disk, grow it so nothing gets deleted immediately
        if self.need_filling and self.getOptionalLimitBytes() >= 0 and self.getOptionalLimitBytes() < total_downloaded:
            limit_bytes = self.getOptionalLimitBytes()
            limit_new = round((float(total_downloaded) / 1024 / 1024 / 1024) * 1.1, 2)  # Current limit + 10%
            self.log.info(
                "First startup after update and limit is smaller than downloaded files size (%.2fGB), increasing it from %.2fGB to %.2fGB" %
                (float(total_downloaded) / 1024 / 1024 / 1024, float(limit_bytes) / 1024 / 1024 / 1024, limit_new)
            )
            config.saveValue("optional_limit", limit_new)
            config.optional_limit = str(limit_new)

    # Predicts if the file is optional (matches only the last 8 chars of the
    # inner_path, so false positives are possible — it is a heuristic)
    def isOptionalFile(self, site_id, inner_path):
        return self.optional_files[site_id].get(inner_path[-8:])

    # Fill file_optional table with optional files found in sites
    def fillTableFileOptional(self, site):
        s = time.time()
        site_id = self.site_ids.get(site.address)
        if not site_id:
            return False
        cur = self.getCursor()
        res = cur.execute("SELECT * FROM content WHERE size_files_optional > 0 AND site_id = %s" % site_id)
        num = 0
        for row in res.fetchall():
            content = site.content_manager.contents[row["inner_path"]]
            try:
                num += self.setContentFilesOptional(site, row["inner_path"], content, cur=cur)
            except Exception as err:
                self.log.error("Error loading %s into file_optional: %s" % (row["inner_path"], err))
        cur.close()

        # Set my files to pinned
        from User import UserManager
        user = UserManager.user_manager.get()
        if not user:
            user = UserManager.user_manager.create()
        auth_address = user.getAuthAddress(site.address)
        res = self.execute(
            "UPDATE file_optional SET is_pinned = 1 WHERE site_id = :site_id AND inner_path LIKE :inner_path",
            {"site_id": site_id, "inner_path": "%%/%s/%%" % auth_address}
        )

        self.log.debug(
            "Filled file_optional table for %s in %.3fs (loaded: %s, is_pinned: %s)" %
            (site.address, time.time() - s, num, res.rowcount)
        )
        self.filled[site.address] = True

    def setContentFilesOptional(self, site, content_inner_path, content, cur=None):
        """Upsert every files_optional entry of one content.json into file_optional.

        Returns the number of rows processed.
        """
        if not cur:
            cur = self

        num = 0
        site_id = self.site_ids[site.address]
        content_inner_dir = helper.getDirname(content_inner_path)
        for relative_inner_path, file in content.get("files_optional", {}).items():
            file_inner_path = content_inner_dir + relative_inner_path
            # First 4 hex digits of the sha512 — same id stored in the hashfield
            hash_id = int(file["sha512"][0:4], 16)
            if hash_id in site.content_manager.hashfield:
                is_downloaded = 1
            else:
                is_downloaded = 0
            if site.address + "/" + content_inner_dir in self.my_optional_files:
                is_pinned = 1
            else:
                is_pinned = 0
            cur.insertOrUpdate("file_optional", {
                "hash_id": hash_id,
                "size": int(file["size"])
            }, {
                "site_id": site_id,
                "inner_path": file_inner_path
            }, oninsert={
                "time_added": int(time.time()),
                "time_downloaded": int(time.time()) if is_downloaded else 0,
                "is_downloaded": is_downloaded,
                "peer": is_downloaded,
                "is_pinned": is_pinned
            })
            self.optional_files[site_id][file_inner_path[-8:]] = 1
            num += 1

        return num

    def setContent(self, site, inner_path, content, size=0):
        # Keep file_optional in sync whenever a content.json changes
        super(ContentDbPlugin, self).setContent(site, inner_path, content, size=size)
        old_content = site.content_manager.contents.get(inner_path, {})
        if (not self.need_filling or self.filled.get(site.address)) and ("files_optional" in content or "files_optional" in old_content):
            self.setContentFilesOptional(site, inner_path, content)
            # Check deleted files
            if old_content:
                old_files = old_content.get("files_optional", {}).keys()
                new_files = content.get("files_optional", {}).keys()
                content_inner_dir = helper.getDirname(inner_path)
                deleted = [content_inner_dir + key for key in old_files if key not in new_files]
                if deleted:
                    site_id = self.site_ids[site.address]
                    # "WHERE ?" — project Db helper; presumably expands the dict
                    # (list values become IN (...)) — see Db.execute
                    self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": deleted})

    def deleteContent(self, site, inner_path):
        # Remove the optional-file rows belonging to a deleted content.json
        content = site.content_manager.contents.get(inner_path)
        if content and "files_optional" in content:
            site_id = self.site_ids[site.address]
            content_inner_dir = helper.getDirname(inner_path)
            optional_inner_paths = [
                content_inner_dir + relative_inner_path
                for relative_inner_path in content.get("files_optional", {}).keys()
            ]
            self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": optional_inner_paths})
        super(ContentDbPlugin, self).deleteContent(site, inner_path)

    def updatePeerNumbers(self):
        """Recount how many peers have each optional file and store it in the peer column.

        Sites whose hashfields have not changed since the last run are skipped.
        """
        s = time.time()
        num_file = 0
        num_updated = 0
        num_site = 0
        for site in list(self.sites.values()):
            if not site.content_manager.has_optional_files:
                continue
            if not site.isServing():
                continue
            # Any peer whose hashfield changed since our last run?
            has_updated_hashfield = next((
                peer
                for peer in site.peers.values()
                if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated
            ), None)

            if not has_updated_hashfield and site.content_manager.hashfield.time_changed < self.time_peer_numbers_updated:
                continue

            # Count every hash_id occurrence across all peers plus ourselves
            hashfield_peers = itertools.chain.from_iterable(
                peer.hashfield.storage
                for peer in site.peers.values()
                if peer.has_hashfield
            )
            peer_nums = collections.Counter(
                itertools.chain(
                    hashfield_peers,
                    site.content_manager.hashfield
                )
            )

            site_id = self.site_ids[site.address]
            if not site_id:
                continue

            res = self.execute("SELECT file_id, hash_id, peer FROM file_optional WHERE ?", {"site_id": site_id})
            updates = {}
            for row in res:
                peer_num = peer_nums.get(row["hash_id"], 0)
                if peer_num != row["peer"]:
                    updates[row["file_id"]] = peer_num

            for file_id, peer_num in updates.items():
                self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id))

            num_updated += len(updates)
            num_file += len(peer_nums)
            num_site += 1

        self.time_peer_numbers_updated = time.time()
        self.log.debug("%s/%s peer number for %s site updated in %.3fs" % (num_updated, num_file, num_site, time.time() - s))

    def queryDeletableFiles(self):
        """Yield deletable file_optional rows, best deletion candidates first.

        Three passes: well-seeded & stale, then poorly-seeded & stale, then
        everything matching getOptionalUsedWhere(). Each pass pages in chunks
        of 50 rows.
        """
        # First return the files with at least 10 seeders and not accessed recently
        # NOTE(review): 60 * 60 * 7 is 7 hours, not the "last week" the original
        # comments claimed — confirm the intended staleness window.
        query = """
            SELECT * FROM file_optional
            WHERE peer > 10 AND %s
            ORDER BY time_accessed < %s DESC, uploaded / size
        """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7))
        limit_start = 0
        while 1:
            num = 0
            res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
            for row in res:
                yield row
                num += 1
            if num < 50:
                break
            limit_start += 50

        self.log.debug("queryDeletableFiles returning less-seeded files")

        # Then return files with fewer seeders but still not accessed recently
        query = """
            SELECT * FROM file_optional
            WHERE peer <= 10 AND %s
            ORDER BY peer DESC, time_accessed < %s DESC, uploaded / size
        """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7))
        limit_start = 0
        while 1:
            num = 0
            res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
            for row in res:
                yield row
                num += 1
            if num < 50:
                break
            limit_start += 50

        self.log.debug("queryDeletableFiles returning everything")

        # At the end return all files
        query = """
            SELECT * FROM file_optional
            WHERE peer <= 10 AND %s
            ORDER BY peer DESC, time_accessed, uploaded / size
        """ % self.getOptionalUsedWhere()
        limit_start = 0
        while 1:
            num = 0
            res = self.execute("%s LIMIT %s, 50" % (query, limit_start))
            for row in res:
                yield row
                num += 1
            if num < 50:
                break
            limit_start += 50

    def getOptionalLimitBytes(self):
        """Return the optional-file size limit in bytes ("10%" of free space or "X" GB)."""
        if config.optional_limit.endswith("%"):
            limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit))
            limit_bytes = helper.getFreeSpace() * (limit_percent / 100)
        else:
            limit_bytes = float(re.sub("[^0-9.]", "", config.optional_limit)) * 1024 * 1024 * 1024
        return limit_bytes

    def getOptionalUsedWhere(self):
        """Return the SQL WHERE fragment selecting deletable optional files."""
        maxsize = config.optional_limit_exclude_minsize * 1024 * 1024
        query = "is_downloaded = 1 AND is_pinned = 0 AND size < %s" % maxsize

        # Don't delete optional files from owned sites
        my_site_ids = []
        for address, site in self.sites.items():
            if site.settings["own"]:
                my_site_ids.append(str(self.site_ids[address]))

        if my_site_ids:
            query += " AND site_id NOT IN (%s)" % ", ".join(my_site_ids)
        return query

    def getOptionalUsedBytes(self):
        """Return the total size (bytes) of deletable downloaded optional files."""
        size = self.execute("SELECT SUM(size) FROM file_optional WHERE %s" % self.getOptionalUsedWhere()).fetchone()[0]
        if not size:
            size = 0
        return size

    def getOptionalNeedDelete(self, size):
        """Return how many bytes must be deleted to get back under the limit."""
        if config.optional_limit.endswith("%"):
            limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit))
            need_delete = size - ((helper.getFreeSpace() + size) * (limit_percent / 100))
        else:
            need_delete = size - self.getOptionalLimitBytes()
        return need_delete

    def checkOptionalLimit(self, limit=None):
        """Delete deletable optional files until usage is back under the limit.

        Runs every 5 minutes via the timer set up in __init__.
        Returns False when nothing needs deleting (or the limit is invalid).
        """
        if not limit:
            limit = self.getOptionalLimitBytes()

        if limit < 0:
            self.log.debug("Invalid limit for optional files: %s" % limit)
            return False

        size = self.getOptionalUsedBytes()

        need_delete = self.getOptionalNeedDelete(size)

        self.log.debug(
            "Optional size: %.1fMB/%.1fMB, Need delete: %.1fMB" %
            (float(size) / 1024 / 1024, float(limit) / 1024 / 1024, float(need_delete) / 1024 / 1024)
        )
        if need_delete <= 0:
            return False

        # Refresh seeder counts so deletion prefers well-seeded files
        self.updatePeerNumbers()

        site_ids_reverse = {val: key for key, val in self.site_ids.items()}
        deleted_file_ids = []
        for row in self.queryDeletableFiles():
            site_address = site_ids_reverse.get(row["site_id"])
            site = self.sites.get(site_address)
            if not site:
                self.log.error("No site found for id: %s" % row["site_id"])
                continue
            site.log.debug("Deleting %s %.3f MB left" % (row["inner_path"], float(need_delete) / 1024 / 1024))
            deleted_file_ids.append(row["file_id"])
            try:
                site.content_manager.optionalRemoved(row["inner_path"], row["hash_id"], row["size"])
                site.storage.delete(row["inner_path"])
                need_delete -= row["size"]
            except Exception as err:
                site.log.error("Error deleting %s: %s" % (row["inner_path"], err))

            if need_delete <= 0:
                break

        # Mark the deleted files as no longer downloaded / pinned
        cur = self.getCursor()
        for file_id in deleted_file_ids:
            cur.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"file_id": file_id})
        cur.close()
+
+
@PluginManager.registerTo("SiteManager")
class SiteManagerPlugin(object):
    def load(self, *args, **kwargs):
        """Load sites as usual, then bootstrap optional-file stats exactly once."""
        result = super(SiteManagerPlugin, self).load(*args, **kwargs)
        if self.sites and not content_db.optional_files_loaded and content_db.conn:
            content_db.optional_files_loaded = True
            content_db.loadFilesOptional()
        return result
\ No newline at end of file
diff --git a/plugins/OptionalManager/OptionalManagerPlugin.py b/plugins/OptionalManager/OptionalManagerPlugin.py
new file mode 100644
index 00000000..f01fab65
--- /dev/null
+++ b/plugins/OptionalManager/OptionalManagerPlugin.py
@@ -0,0 +1,253 @@
+import time
+import re
+import collections
+
+import gevent
+
+from util import helper
+from Plugin import PluginManager
+from . import ContentDbPlugin
+
+
+# We can only import plugin host classes after the plugins are loaded
+@PluginManager.afterLoad
+def importPluginnedClasses():
+ global config
+ from Config import config
+
+
+def processAccessLog():
+ global access_log
+ if access_log:
+ content_db = ContentDbPlugin.content_db
+ if not content_db.conn:
+ return False
+
+ s = time.time()
+ access_log_prev = access_log
+ access_log = collections.defaultdict(dict)
+ now = int(time.time())
+ num = 0
+ for site_id in access_log_prev:
+ content_db.execute(
+ "UPDATE file_optional SET time_accessed = %s WHERE ?" % now,
+ {"site_id": site_id, "inner_path": list(access_log_prev[site_id].keys())}
+ )
+ num += len(access_log_prev[site_id])
+
+ content_db.log.debug("Inserted %s web request stat in %.3fs" % (num, time.time() - s))
+
+
+def processRequestLog():
+ global request_log
+ if request_log:
+ content_db = ContentDbPlugin.content_db
+ if not content_db.conn:
+ return False
+
+ s = time.time()
+ request_log_prev = request_log
+ request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path1: 1, inner_path2: 1...}}
+ num = 0
+ for site_id in request_log_prev:
+ for inner_path, uploaded in request_log_prev[site_id].items():
+ content_db.execute(
+ "UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" % uploaded,
+ {"site_id": site_id, "inner_path": inner_path}
+ )
+ num += 1
+ content_db.log.debug("Inserted %s file request stat in %.3fs" % (num, time.time() - s))
+
+
+if "access_log" not in locals().keys(): # To keep between module reloads
+ access_log = collections.defaultdict(dict) # {site_id: {inner_path1: 1, inner_path2: 1...}}
+ request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path1: 1, inner_path2: 1...}}
+ helper.timer(61, processAccessLog)
+ helper.timer(60, processRequestLog)
+
+
+@PluginManager.registerTo("ContentManager")
+class ContentManagerPlugin(object):
+ def __init__(self, *args, **kwargs):
+ self.cache_is_pinned = {}
+ super(ContentManagerPlugin, self).__init__(*args, **kwargs)
+
+ def optionalDownloaded(self, inner_path, hash_id, size=None, own=False):
+ if "|" in inner_path: # Big file piece
+ file_inner_path, file_range = inner_path.split("|")
+ else:
+ file_inner_path = inner_path
+
+ self.contents.db.executeDelayed(
+ "UPDATE file_optional SET time_downloaded = :now, is_downloaded = 1, peer = peer + 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 0",
+ {"now": int(time.time()), "site_id": self.contents.db.site_ids[self.site.address], "inner_path": file_inner_path}
+ )
+
+ return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own)
+
+ def optionalRemoved(self, inner_path, hash_id, size=None):
+ res = self.contents.db.execute(
+ "UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 1",
+ {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
+ )
+
+ if res.rowcount > 0:
+ back = super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size)
+ # Re-add to hashfield if we have other file with the same hash_id
+ if self.isDownloaded(hash_id=hash_id, force_check_db=True):
+ self.hashfield.appendHashId(hash_id)
+ else:
+ back = False
+ self.cache_is_pinned = {}
+ return back
+
+ def optionalRenamed(self, inner_path_old, inner_path_new):
+ back = super(ContentManagerPlugin, self).optionalRenamed(inner_path_old, inner_path_new)
+ self.cache_is_pinned = {}
+ self.contents.db.execute(
+ "UPDATE file_optional SET inner_path = :inner_path_new WHERE site_id = :site_id AND inner_path = :inner_path_old",
+ {"site_id": self.contents.db.site_ids[self.site.address], "inner_path_old": inner_path_old, "inner_path_new": inner_path_new}
+ )
+ return back
+
+ def isDownloaded(self, inner_path=None, hash_id=None, force_check_db=False):
+ if hash_id and not force_check_db and hash_id not in self.hashfield:
+ return False
+
+ if inner_path:
+ res = self.contents.db.execute(
+ "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1",
+ {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
+ )
+ else:
+ res = self.contents.db.execute(
+ "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND hash_id = :hash_id AND is_downloaded = 1 LIMIT 1",
+ {"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id}
+ )
+ row = res.fetchone()
+ if row and row["is_downloaded"]:
+ return True
+ else:
+ return False
+
+ def isPinned(self, inner_path):
+ if inner_path in self.cache_is_pinned:
+ self.site.log.debug("Cached is pinned: %s" % inner_path)
+ return self.cache_is_pinned[inner_path]
+
+ res = self.contents.db.execute(
+ "SELECT is_pinned FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1",
+ {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path}
+ )
+ row = res.fetchone()
+
+ if row and row[0]:
+ is_pinned = True
+ else:
+ is_pinned = False
+
+ self.cache_is_pinned[inner_path] = is_pinned
+ self.site.log.debug("Cache set is pinned: %s %s" % (inner_path, is_pinned))
+
+ return is_pinned
+
+ def setPin(self, inner_path, is_pinned):
+ content_db = self.contents.db
+ site_id = content_db.site_ids[self.site.address]
+ content_db.execute("UPDATE file_optional SET is_pinned = %d WHERE ?" % is_pinned, {"site_id": site_id, "inner_path": inner_path})
+ self.cache_is_pinned = {}
+
+ def optionalDelete(self, inner_path):
+ if self.isPinned(inner_path):
+ self.site.log.debug("Skip deleting pinned optional file: %s" % inner_path)
+ return False
+ else:
+ return super(ContentManagerPlugin, self).optionalDelete(inner_path)
+
+
+@PluginManager.registerTo("WorkerManager")
+class WorkerManagerPlugin(object):
+ def doneTask(self, task):
+ super(WorkerManagerPlugin, self).doneTask(task)
+
+ if task["optional_hash_id"] and not self.tasks: # Execute delayed queries immediately after tasks finished
+ ContentDbPlugin.content_db.processDelayed()
+
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+ def parsePath(self, path):
+ global access_log
+ path_parts = super(UiRequestPlugin, self).parsePath(path)
+ if path_parts:
+ site_id = ContentDbPlugin.content_db.site_ids.get(path_parts["request_address"])
+ if site_id:
+ if ContentDbPlugin.content_db.isOptionalFile(site_id, path_parts["inner_path"]):
+ access_log[site_id][path_parts["inner_path"]] = 1
+ return path_parts
+
+
+@PluginManager.registerTo("FileRequest")
+class FileRequestPlugin(object):
+ def actionGetFile(self, params):
+ stats = super(FileRequestPlugin, self).actionGetFile(params)
+ self.recordFileRequest(params["site"], params["inner_path"], stats)
+ return stats
+
+ def actionStreamFile(self, params):
+ stats = super(FileRequestPlugin, self).actionStreamFile(params)
+ self.recordFileRequest(params["site"], params["inner_path"], stats)
+ return stats
+
+ def recordFileRequest(self, site_address, inner_path, stats):
+ if not stats:
+ # Only track the last request of files
+ return False
+ site_id = ContentDbPlugin.content_db.site_ids[site_address]
+ if site_id and ContentDbPlugin.content_db.isOptionalFile(site_id, inner_path):
+ request_log[site_id][inner_path] += stats["bytes_sent"]
+
+
+@PluginManager.registerTo("Site")
+class SitePlugin(object):
+ def isDownloadable(self, inner_path):
+ is_downloadable = super(SitePlugin, self).isDownloadable(inner_path)
+ if is_downloadable:
+ return is_downloadable
+
+ for path in self.settings.get("optional_help", {}).keys():
+ if inner_path.startswith(path):
+ return True
+
+ return False
+
+ def fileForgot(self, inner_path):
+ if "|" in inner_path and self.content_manager.isPinned(re.sub(r"\|.*", "", inner_path)):
+ self.log.debug("File %s is pinned, no fileForgot" % inner_path)
+ return False
+ else:
+ return super(SitePlugin, self).fileForgot(inner_path)
+
+ def fileDone(self, inner_path):
+ if "|" in inner_path and self.bad_files.get(inner_path, 0) > 5: # Idle optional file done
+ inner_path_file = re.sub(r"\|.*", "", inner_path)
+ num_changed = 0
+ for key, val in self.bad_files.items():
+ if key.startswith(inner_path_file) and val > 1:
+ self.bad_files[key] = 1
+ num_changed += 1
+ self.log.debug("Idle optional file piece done, changed retry number of %s pieces." % num_changed)
+ if num_changed:
+ gevent.spawn(self.retryBadFiles)
+
+ return super(SitePlugin, self).fileDone(inner_path)
+
+
+@PluginManager.registerTo("ConfigPlugin")
+class ConfigPlugin(object):
+ def createArguments(self):
+ group = self.parser.add_argument_group("OptionalManager plugin")
+ group.add_argument('--optional_limit', help='Limit total size of optional files', default="10%", metavar="GB or free space %")
+ group.add_argument('--optional_limit_exclude_minsize', help='Exclude files larger than this limit from optional size limit calculation', default=20, metavar="MB", type=int)
+
+ return super(ConfigPlugin, self).createArguments()
diff --git a/plugins/OptionalManager/Test/TestOptionalManager.py b/plugins/OptionalManager/Test/TestOptionalManager.py
new file mode 100644
index 00000000..4bd44695
--- /dev/null
+++ b/plugins/OptionalManager/Test/TestOptionalManager.py
@@ -0,0 +1,158 @@
+import copy
+
+import pytest
+
+
+@pytest.mark.usefixtures("resetSettings")
+class TestOptionalManager:
+ def testDbFill(self, site):
+ contents = site.content_manager.contents
+ assert len(site.content_manager.hashfield) > 0
+ assert contents.db.execute("SELECT COUNT(*) FROM file_optional WHERE is_downloaded = 1").fetchone()[0] == len(site.content_manager.hashfield)
+
+ def testSetContent(self, site):
+ contents = site.content_manager.contents
+
+ # Add new file
+ new_content = copy.deepcopy(contents["content.json"])
+ new_content["files_optional"]["testfile"] = {
+ "size": 1234,
+ "sha512": "aaaabbbbcccc"
+ }
+ num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
+ contents["content.json"] = new_content
+ assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] > num_optional_files_before
+
+ # Remove file
+ new_content = copy.deepcopy(contents["content.json"])
+ del new_content["files_optional"]["testfile"]
+ num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
+ contents["content.json"] = new_content
+ assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before
+
+ def testDeleteContent(self, site):
+ contents = site.content_manager.contents
+ num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0]
+ del contents["content.json"]
+ assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before
+
+ def testVerifyFiles(self, site):
+ contents = site.content_manager.contents
+
+ # Add new file
+ new_content = copy.deepcopy(contents["content.json"])
+ new_content["files_optional"]["testfile"] = {
+ "size": 1234,
+ "sha512": "aaaabbbbcccc"
+ }
+ contents["content.json"] = new_content
+ file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
+ assert not file_row["is_downloaded"]
+
+ # Write file from outside of ZeroNet
+ site.storage.open("testfile", "wb").write(b"A" * 1234) # For quick check hash does not matter only file size
+
+ hashfield_len_before = len(site.content_manager.hashfield)
+ site.storage.verifyFiles(quick_check=True)
+ assert len(site.content_manager.hashfield) == hashfield_len_before + 1
+
+ file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
+ assert file_row["is_downloaded"]
+
+ # Delete file outside of ZeroNet
+ site.storage.delete("testfile")
+ site.storage.verifyFiles(quick_check=True)
+ file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone()
+ assert not file_row["is_downloaded"]
+
+ def testVerifyFilesSameHashId(self, site):
+ contents = site.content_manager.contents
+
+ new_content = copy.deepcopy(contents["content.json"])
+
+ # Add two files with same hashid (first 4 character)
+ new_content["files_optional"]["testfile1"] = {
+ "size": 1234,
+ "sha512": "aaaabbbbcccc"
+ }
+ new_content["files_optional"]["testfile2"] = {
+ "size": 2345,
+ "sha512": "aaaabbbbdddd"
+ }
+ contents["content.json"] = new_content
+
+ assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd")
+
+ # Write files from outside of ZeroNet (For quick check hash does not matter only file size)
+ site.storage.open("testfile1", "wb").write(b"A" * 1234)
+ site.storage.open("testfile2", "wb").write(b"B" * 2345)
+
+ site.storage.verifyFiles(quick_check=True)
+
+ # Make sure that both is downloaded
+ assert site.content_manager.isDownloaded("testfile1")
+ assert site.content_manager.isDownloaded("testfile2")
+ assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") in site.content_manager.hashfield
+
+ # Delete one of the files
+ site.storage.delete("testfile1")
+ site.storage.verifyFiles(quick_check=True)
+ assert not site.content_manager.isDownloaded("testfile1")
+ assert site.content_manager.isDownloaded("testfile2")
+ assert site.content_manager.hashfield.getHashId("aaaabbbbdddd") in site.content_manager.hashfield
+
+ def testIsPinned(self, site):
+ assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png")
+ site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
+ assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
+
+ assert len(site.content_manager.cache_is_pinned) == 1
+ site.content_manager.cache_is_pinned = {}
+ assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
+
+ def testBigfilePieceReset(self, site):
+ site.bad_files = {
+ "data/fake_bigfile.mp4|0-1024": 10,
+ "data/fake_bigfile.mp4|1024-2048": 10,
+ "data/fake_bigfile.mp4|2048-3064": 10
+ }
+ site.onFileDone("data/fake_bigfile.mp4|0-1024")
+ assert site.bad_files["data/fake_bigfile.mp4|1024-2048"] == 1
+ assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1
+
+ def testOptionalDelete(self, site):
+ contents = site.content_manager.contents
+
+ site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
+ site.content_manager.setPin("data/img/zeroid.png", False)
+ new_content = copy.deepcopy(contents["content.json"])
+ del new_content["files_optional"]["data/img/zerotalk-upvote.png"]
+ del new_content["files_optional"]["data/img/zeroid.png"]
+
+ assert site.storage.isFile("data/img/zerotalk-upvote.png")
+ assert site.storage.isFile("data/img/zeroid.png")
+
+ site.storage.writeJson("content.json", new_content)
+ site.content_manager.loadContent("content.json", force=True)
+
+ assert not site.storage.isFile("data/img/zeroid.png")
+ assert site.storage.isFile("data/img/zerotalk-upvote.png")
+
+ def testOptionalRename(self, site):
+ contents = site.content_manager.contents
+
+ site.content_manager.setPin("data/img/zerotalk-upvote.png", True)
+ new_content = copy.deepcopy(contents["content.json"])
+ new_content["files_optional"]["data/img/zerotalk-upvote-new.png"] = new_content["files_optional"]["data/img/zerotalk-upvote.png"]
+ del new_content["files_optional"]["data/img/zerotalk-upvote.png"]
+
+ assert site.storage.isFile("data/img/zerotalk-upvote.png")
+ assert site.content_manager.isPinned("data/img/zerotalk-upvote.png")
+
+ site.storage.writeJson("content.json", new_content)
+ site.content_manager.loadContent("content.json", force=True)
+
+ assert not site.storage.isFile("data/img/zerotalk-upvote.png")
+ assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png")
+ assert site.content_manager.isPinned("data/img/zerotalk-upvote-new.png")
+ assert site.storage.isFile("data/img/zerotalk-upvote-new.png")
diff --git a/plugins/OptionalManager/Test/conftest.py b/plugins/OptionalManager/Test/conftest.py
new file mode 100644
index 00000000..8c1df5b2
--- /dev/null
+++ b/plugins/OptionalManager/Test/conftest.py
@@ -0,0 +1 @@
+from src.Test.conftest import *
\ No newline at end of file
diff --git a/plugins/OptionalManager/Test/pytest.ini b/plugins/OptionalManager/Test/pytest.ini
new file mode 100644
index 00000000..d09210d1
--- /dev/null
+++ b/plugins/OptionalManager/Test/pytest.ini
@@ -0,0 +1,5 @@
+[pytest]
+python_files = Test*.py
+addopts = -rsxX -v --durations=6
+markers =
+ webtest: mark a test as a webtest.
\ No newline at end of file
diff --git a/plugins/OptionalManager/UiWebsocketPlugin.py b/plugins/OptionalManager/UiWebsocketPlugin.py
new file mode 100644
index 00000000..0acc53cf
--- /dev/null
+++ b/plugins/OptionalManager/UiWebsocketPlugin.py
@@ -0,0 +1,396 @@
+import re
+import time
+import html
+import os
+
+import gevent
+
+from Plugin import PluginManager
+from Config import config
+from util import helper
+from util.Flag import flag
+from Translate import Translate
+
+
+plugin_dir = os.path.dirname(__file__)
+
+if "_" not in locals():
+ _ = Translate(plugin_dir + "/languages/")
+
+bigfile_sha512_cache = {}
+
+
+@PluginManager.registerTo("UiWebsocket")
+class UiWebsocketPlugin(object):
+ def __init__(self, *args, **kwargs):
+ self.time_peer_numbers_updated = 0
+ super(UiWebsocketPlugin, self).__init__(*args, **kwargs)
+
+ def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
+ # Add file to content.db and set it as pinned
+ content_db = self.site.content_manager.contents.db
+ content_inner_dir = helper.getDirname(inner_path)
+ content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time()
+ if len(content_db.my_optional_files) > 50: # Keep only last 50
+ oldest_key = min(
+ iter(content_db.my_optional_files.keys()),
+ key=(lambda key: content_db.my_optional_files[key])
+ )
+ del content_db.my_optional_files[oldest_key]
+
+ return super(UiWebsocketPlugin, self).actionSiteSign(to, privatekey, inner_path, *args, **kwargs)
+
+ def updatePeerNumbers(self):
+ self.site.updateHashfield()
+ content_db = self.site.content_manager.contents.db
+ content_db.updatePeerNumbers()
+ self.site.updateWebsocket(peernumber_updated=True)
+
+ def addBigfileInfo(self, row):
+ global bigfile_sha512_cache
+
+ content_db = self.site.content_manager.contents.db
+ site = content_db.sites[row["address"]]
+ if not site.settings.get("has_bigfile"):
+ return False
+
+ file_key = row["address"] + "/" + row["inner_path"]
+ sha512 = bigfile_sha512_cache.get(file_key)
+ file_info = None
+ if not sha512:
+ file_info = site.content_manager.getFileInfo(row["inner_path"])
+ if not file_info or not file_info.get("piece_size"):
+ return False
+ sha512 = file_info["sha512"]
+ bigfile_sha512_cache[file_key] = sha512
+
+ if sha512 in site.storage.piecefields:
+ piecefield = site.storage.piecefields[sha512].tobytes()
+ else:
+ piecefield = None
+
+ if piecefield:
+ row["pieces"] = len(piecefield)
+ row["pieces_downloaded"] = piecefield.count(b"\x01")
+ row["downloaded_percent"] = 100 * row["pieces_downloaded"] / row["pieces"]
+ if row["pieces_downloaded"]:
+ if row["pieces"] == row["pieces_downloaded"]:
+ row["bytes_downloaded"] = row["size"]
+ else:
+ if not file_info:
+ file_info = site.content_manager.getFileInfo(row["inner_path"])
+ row["bytes_downloaded"] = row["pieces_downloaded"] * file_info.get("piece_size", 0)
+ else:
+ row["bytes_downloaded"] = 0
+
+ row["is_downloading"] = bool(next((inner_path for inner_path in site.bad_files if inner_path.startswith(row["inner_path"])), False))
+
+ # Add leech / seed stats
+ row["peer_seed"] = 0
+ row["peer_leech"] = 0
+ for peer in site.peers.values():
+ if not peer.time_piecefields_updated or sha512 not in peer.piecefields:
+ continue
+ peer_piecefield = peer.piecefields[sha512].tobytes()
+ if not peer_piecefield:
+ continue
+ if peer_piecefield == b"\x01" * len(peer_piecefield):
+ row["peer_seed"] += 1
+ else:
+ row["peer_leech"] += 1
+
+ # Add myself
+ if piecefield:
+ if row["pieces_downloaded"] == row["pieces"]:
+ row["peer_seed"] += 1
+ else:
+ row["peer_leech"] += 1
+
+ return True
+
+ # Optional file functions
+
+ def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10, filter="downloaded", filter_inner_path=None):
+ if not address:
+ address = self.site.address
+
+ # Update peer numbers if necessary
+ content_db = self.site.content_manager.contents.db
+ if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5:
+ # Start in new thread to avoid blocking
+ self.time_peer_numbers_updated = time.time()
+ gevent.spawn(self.updatePeerNumbers)
+
+ if address == "all" and "ADMIN" not in self.permissions:
+ return self.response(to, {"error": "Forbidden"})
+
+ if not self.hasSitePermission(address):
+ return self.response(to, {"error": "Forbidden"})
+
+ if not all([re.match("^[a-z_*/+-]+( DESC| ASC|)$", part.strip()) for part in orderby.split(",")]):
+ return self.response(to, "Invalid order_by")
+
+ if type(limit) != int:
+ return self.response(to, "Invalid limit")
+
+ back = []
+ content_db = self.site.content_manager.contents.db
+
+ wheres = {}
+ wheres_raw = []
+ if "bigfile" in filter:
+ wheres["size >"] = 1024 * 1024 * 1
+ if "downloaded" in filter:
+ wheres_raw.append("(is_downloaded = 1 OR is_pinned = 1)")
+ if "pinned" in filter:
+ wheres["is_pinned"] = 1
+ if filter_inner_path:
+ wheres["inner_path__like"] = filter_inner_path
+
+ if address == "all":
+ join = "LEFT JOIN site USING (site_id)"
+ else:
+ wheres["site_id"] = content_db.site_ids[address]
+ join = ""
+
+ if wheres_raw:
+ query_wheres_raw = "AND" + " AND ".join(wheres_raw)
+ else:
+ query_wheres_raw = ""
+
+ query = "SELECT * FROM file_optional %s WHERE ? %s ORDER BY %s LIMIT %s" % (join, query_wheres_raw, orderby, limit)
+
+ for row in content_db.execute(query, wheres):
+ row = dict(row)
+ if address != "all":
+ row["address"] = address
+
+ if row["size"] > 1024 * 1024:
+ has_bigfile_info = self.addBigfileInfo(row)
+ else:
+ has_bigfile_info = False
+
+ if not has_bigfile_info and "bigfile" in filter:
+ continue
+
+ if not has_bigfile_info:
+ if row["is_downloaded"]:
+ row["bytes_downloaded"] = row["size"]
+ row["downloaded_percent"] = 100
+ else:
+ row["bytes_downloaded"] = 0
+ row["downloaded_percent"] = 0
+
+ back.append(row)
+ self.response(to, back)
+
+ def actionOptionalFileInfo(self, to, inner_path):
+ content_db = self.site.content_manager.contents.db
+ site_id = content_db.site_ids[self.site.address]
+
+ # Update peer numbers if necessary
+ if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5:
+ # Start in new thread to avoid blocking
+ self.time_peer_numbers_updated = time.time()
+ gevent.spawn(self.updatePeerNumbers)
+
+ query = "SELECT * FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1"
+ res = content_db.execute(query, {"site_id": site_id, "inner_path": inner_path})
+ row = next(res, None)
+ if row:
+ row = dict(row)
+ if row["size"] > 1024 * 1024:
+ row["address"] = self.site.address
+ self.addBigfileInfo(row)
+ self.response(to, row)
+ else:
+ self.response(to, None)
+
+ def setPin(self, inner_path, is_pinned, address=None):
+ if not address:
+ address = self.site.address
+
+ if not self.hasSitePermission(address):
+ return {"error": "Forbidden"}
+
+ site = self.server.sites[address]
+ site.content_manager.setPin(inner_path, is_pinned)
+
+ return "ok"
+
+ @flag.no_multiuser
+ def actionOptionalFilePin(self, to, inner_path, address=None):
+ if type(inner_path) is not list:
+ inner_path = [inner_path]
+ back = self.setPin(inner_path, 1, address)
+ num_file = len(inner_path)
+ if back == "ok":
+ if num_file == 1:
+ self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
+ else:
+ self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000])
+ self.response(to, back)
+
+ @flag.no_multiuser
+ def actionOptionalFileUnpin(self, to, inner_path, address=None):
+ if type(inner_path) is not list:
+ inner_path = [inner_path]
+ back = self.setPin(inner_path, 0, address)
+ num_file = len(inner_path)
+ if back == "ok":
+ if num_file == 1:
+ self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000])
+ else:
+ self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000])
+ self.response(to, back)
+
+ @flag.no_multiuser
+ def actionOptionalFileDelete(self, to, inner_path, address=None):
+ if not address:
+ address = self.site.address
+
+ if not self.hasSitePermission(address):
+ return self.response(to, {"error": "Forbidden"})
+
+ site = self.server.sites[address]
+
+ content_db = site.content_manager.contents.db
+ site_id = content_db.site_ids[site.address]
+
+ res = content_db.execute("SELECT * FROM file_optional WHERE ? LIMIT 1", {"site_id": site_id, "inner_path": inner_path, "is_downloaded": 1})
+ row = next(res, None)
+
+ if not row:
+ return self.response(to, {"error": "Not found in content.db"})
+
+ removed = site.content_manager.optionalRemoved(inner_path, row["hash_id"], row["size"])
+ # if not removed:
+ # return self.response(to, {"error": "Not found in hash_id: %s" % row["hash_id"]})
+
+ content_db.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"site_id": site_id, "inner_path": inner_path})
+
+ try:
+ site.storage.delete(inner_path)
+ except Exception as err:
+ return self.response(to, {"error": "File delete error: %s" % err})
+ site.updateWebsocket(file_delete=inner_path)
+
+ if inner_path in site.content_manager.cache_is_pinned:
+ site.content_manager.cache_is_pinned = {}
+
+ self.response(to, "ok")
+
+ # Limit functions
+
+ @flag.admin
+ def actionOptionalLimitStats(self, to):
+ back = {}
+ back["limit"] = config.optional_limit
+ back["used"] = self.site.content_manager.contents.db.getOptionalUsedBytes()
+ back["free"] = helper.getFreeSpace()
+
+ self.response(to, back)
+
+ @flag.no_multiuser
+ @flag.admin
+ def actionOptionalLimitSet(self, to, limit):
+ config.optional_limit = re.sub(r"\.0+$", "", limit) # Remove unnecessary digits from end
+ config.saveValue("optional_limit", limit)
+ self.response(to, "ok")
+
+ # Distribute help functions
+
+ def actionOptionalHelpList(self, to, address=None):
+ if not address:
+ address = self.site.address
+
+ if not self.hasSitePermission(address):
+ return self.response(to, {"error": "Forbidden"})
+
+ site = self.server.sites[address]
+
+ self.response(to, site.settings.get("optional_help", {}))
+
+ @flag.no_multiuser
+ def actionOptionalHelp(self, to, directory, title, address=None):
+ if not address:
+ address = self.site.address
+
+ if not self.hasSitePermission(address):
+ return self.response(to, {"error": "Forbidden"})
+
+ site = self.server.sites[address]
+ content_db = site.content_manager.contents.db
+ site_id = content_db.site_ids[address]
+
+ if "optional_help" not in site.settings:
+ site.settings["optional_help"] = {}
+
+ stats = content_db.execute(
+ "SELECT COUNT(*) AS num, SUM(size) AS size FROM file_optional WHERE site_id = :site_id AND inner_path LIKE :inner_path",
+ {"site_id": site_id, "inner_path": directory + "%"}
+ ).fetchone()
+ stats = dict(stats)
+
+ if not stats["size"]:
+ stats["size"] = 0
+ if not stats["num"]:
+ stats["num"] = 0
+
+ self.cmd("notification", [
+ "done",
+ _["You started to help distribute %s.<br>Directory: %s"] %
+ (html.escape(title), html.escape(directory)),
+ 10000
+ ])
+
+ site.settings["optional_help"][directory] = title
+
+ self.response(to, dict(stats))
+
+ @flag.no_multiuser
+ def actionOptionalHelpRemove(self, to, directory, address=None):
+ if not address:
+ address = self.site.address
+
+ if not self.hasSitePermission(address):
+ return self.response(to, {"error": "Forbidden"})
+
+ site = self.server.sites[address]
+
+ try:
+ del site.settings["optional_help"][directory]
+ self.response(to, "ok")
+ except Exception:
+ self.response(to, {"error": "Not found"})
+
+ def cbOptionalHelpAll(self, to, site, value):
+ site.settings["autodownloadoptional"] = value
+ self.response(to, value)
+
+ @flag.no_multiuser
+ def actionOptionalHelpAll(self, to, value, address=None):
+ if not address:
+ address = self.site.address
+
+ if not self.hasSitePermission(address):
+ return self.response(to, {"error": "Forbidden"})
+
+ site = self.server.sites[address]
+
+ if value:
+ if "ADMIN" in self.site.settings["permissions"]:
+ self.cbOptionalHelpAll(to, site, True)
+ else:
+ site_title = site.content_manager.contents["content.json"].get("title", address)
+ self.cmd(
+ "confirm",
+ [
+ _["Help distribute all new optional files on site %s"] % html.escape(site_title),
+ _["Yes, I want to help!"]
+ ],
+ lambda res: self.cbOptionalHelpAll(to, site, True)
+ )
+ else:
+ site.settings["autodownloadoptional"] = False
+ self.response(to, False)
diff --git a/plugins/OptionalManager/__init__.py b/plugins/OptionalManager/__init__.py
new file mode 100644
index 00000000..77b8c348
--- /dev/null
+++ b/plugins/OptionalManager/__init__.py
@@ -0,0 +1,2 @@
+from . import OptionalManagerPlugin
+from . import UiWebsocketPlugin
diff --git a/plugins/OptionalManager/languages/es.json b/plugins/OptionalManager/languages/es.json
new file mode 100644
index 00000000..32ae46ae
--- /dev/null
+++ b/plugins/OptionalManager/languages/es.json
@@ -0,0 +1,7 @@
+{
+ "Pinned %s files": "Archivos %s fijados",
+ "Removed pin from %s files": "Archivos %s que no estan fijados",
+ "You started to help distribute %s.<br>Directory: %s": "Tu empezaste a ayudar a distribuir %s.<br>Directorio: %s",
+ "Help distribute all new optional files on site %s": "Ayude a distribuir todos los archivos opcionales en el sitio %s",
+ "Yes, I want to help!": "¡Si, yo quiero ayudar!"
+}
diff --git a/plugins/OptionalManager/languages/fr.json b/plugins/OptionalManager/languages/fr.json
new file mode 100644
index 00000000..47a563dc
--- /dev/null
+++ b/plugins/OptionalManager/languages/fr.json
@@ -0,0 +1,7 @@
+{
+ "Pinned %s files": "Fichiers %s épinglés",
+ "Removed pin from %s files": "Fichiers %s ne sont plus épinglés",
+ "You started to help distribute %s.<br>Directory: %s": "Vous avez commencé à aider à distribuer %s.<br>Dossier : %s",
+ "Help distribute all new optional files on site %s": "Aider à distribuer tous les fichiers optionnels du site %s",
+ "Yes, I want to help!": "Oui, je veux aider !"
+}
diff --git a/plugins/OptionalManager/languages/hu.json b/plugins/OptionalManager/languages/hu.json
new file mode 100644
index 00000000..7a23b86c
--- /dev/null
+++ b/plugins/OptionalManager/languages/hu.json
@@ -0,0 +1,7 @@
+{
+ "Pinned %s files": "%s fájl rögzítve",
+ "Removed pin from %s files": "%s fájl rögzítés eltávolítva",
+ "You started to help distribute %s.<br>Directory: %s": "Új segítség a terjesztésben: %s.<br>Könyvtár: %s",
+ "Help distribute all new optional files on site %s": "Segítség az összes új opcionális fájl terjesztésében az %s oldalon",
+ "Yes, I want to help!": "Igen, segíteni akarok!"
+}
diff --git a/plugins/OptionalManager/languages/jp.json b/plugins/OptionalManager/languages/jp.json
new file mode 100644
index 00000000..af6dc79e
--- /dev/null
+++ b/plugins/OptionalManager/languages/jp.json
@@ -0,0 +1,7 @@
+{
+ "Pinned %s files": "%s 件のファイルを固定",
+ "Removed pin from %s files": "%s 件のファイルの固定を解除",
+ "You started to help distribute %s.<br>Directory: %s": "あなたはサイト: %s の配布の援助を開始しました。<br>ディレクトリ: %s",
+ "Help distribute all new optional files on site %s": "サイト: %s のすべての新しいオプションファイルの配布を援助しますか?",
+ "Yes, I want to help!": "はい、やります!"
+}
diff --git a/plugins/OptionalManager/languages/pt-br.json b/plugins/OptionalManager/languages/pt-br.json
new file mode 100644
index 00000000..21d90cc0
--- /dev/null
+++ b/plugins/OptionalManager/languages/pt-br.json
@@ -0,0 +1,7 @@
+{
+ "Pinned %s files": "Arquivos %s fixados",
+ "Removed pin from %s files": "Arquivos %s não estão fixados",
+ "You started to help distribute %s.
Directory: %s": "Você começou a ajudar a distribuir %s.
Pasta: %s",
+ "Help distribute all new optional files on site %s": "Ajude a distribuir todos os novos arquivos opcionais no site %s",
+ "Yes, I want to help!": "Sim, eu quero ajudar!"
+}
diff --git a/plugins/OptionalManager/languages/zh-tw.json b/plugins/OptionalManager/languages/zh-tw.json
new file mode 100644
index 00000000..dfa9eaf3
--- /dev/null
+++ b/plugins/OptionalManager/languages/zh-tw.json
@@ -0,0 +1,7 @@
+{
+ "Pinned %s files": "已固定 %s 個檔",
+ "Removed pin from %s files": "已解除固定 %s 個檔",
+ "You started to help distribute %s.
Directory: %s": "你已經開始幫助分發 %s 。
目錄:%s",
+ "Help distribute all new optional files on site %s": "你想要幫助分發 %s 網站的所有檔嗎?",
+ "Yes, I want to help!": "是,我想要幫助!"
+}
diff --git a/plugins/OptionalManager/languages/zh.json b/plugins/OptionalManager/languages/zh.json
new file mode 100644
index 00000000..ae18118e
--- /dev/null
+++ b/plugins/OptionalManager/languages/zh.json
@@ -0,0 +1,7 @@
+{
+ "Pinned %s files": "已固定 %s 个文件",
+ "Removed pin from %s files": "已解除固定 %s 个文件",
+ "You started to help distribute %s.
Directory: %s": "您已经开始帮助分发 %s 。
目录:%s",
+ "Help distribute all new optional files on site %s": "您想要帮助分发 %s 站点的所有文件吗?",
+ "Yes, I want to help!": "是,我想要帮助!"
+}
diff --git a/plugins/PeerDb/PeerDbPlugin.py b/plugins/PeerDb/PeerDbPlugin.py
new file mode 100644
index 00000000..aea11fbb
--- /dev/null
+++ b/plugins/PeerDb/PeerDbPlugin.py
@@ -0,0 +1,108 @@
+import time
+import sqlite3
+import random
+import atexit
+
+import gevent
+from Plugin import PluginManager
+
+
+@PluginManager.registerTo("ContentDb")
+class ContentDbPlugin(object):
+ def __init__(self, *args, **kwargs):
+ atexit.register(self.saveAllPeers)
+ super(ContentDbPlugin, self).__init__(*args, **kwargs)
+
+ def getSchema(self):
+ schema = super(ContentDbPlugin, self).getSchema()
+
+ schema["tables"]["peer"] = {
+ "cols": [
+ ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
+ ["address", "TEXT NOT NULL"],
+ ["port", "INTEGER NOT NULL"],
+ ["hashfield", "BLOB"],
+ ["reputation", "INTEGER NOT NULL"],
+ ["time_added", "INTEGER NOT NULL"],
+ ["time_found", "INTEGER NOT NULL"],
+ ["time_response", "INTEGER NOT NULL"],
+ ["connection_error", "INTEGER NOT NULL"]
+ ],
+ "indexes": [
+ "CREATE UNIQUE INDEX peer_key ON peer (site_id, address, port)"
+ ],
+ "schema_changed": 3
+ }
+
+ return schema
+
+ def loadPeers(self, site):
+ s = time.time()
+ site_id = self.site_ids.get(site.address)
+ res = self.execute("SELECT * FROM peer WHERE site_id = :site_id", {"site_id": site_id})
+ num = 0
+ num_hashfield = 0
+ for row in res:
+ peer = site.addPeer(str(row["address"]), row["port"])
+ if not peer: # Already exist
+ continue
+ if row["hashfield"]:
+ peer.hashfield.replaceFromBytes(row["hashfield"])
+ num_hashfield += 1
+ peer.time_added = row["time_added"]
+ peer.time_found = row["time_found"]
+ peer.time_found = row["time_found"]
+ peer.time_response = row["time_response"]
+ peer.connection_error = row["connection_error"]
+ if row["address"].endswith(".onion"):
+ # Onion peers less likely working
+ if peer.reputation > 0:
+ peer.reputation = peer.reputation / 2
+ else:
+ peer.reputation -= 1
+ num += 1
+ if num_hashfield:
+ site.content_manager.has_optional_files = True
+ site.log.debug("%s peers (%s with hashfield) loaded in %.3fs" % (num, num_hashfield, time.time() - s))
+
+ def iteratePeers(self, site):
+ site_id = self.site_ids.get(site.address)
+ for key, peer in list(site.peers.items()):
+ address, port = key.rsplit(":", 1)
+ if peer.has_hashfield:
+ hashfield = sqlite3.Binary(peer.hashfield.tobytes())
+ else:
+ hashfield = ""
+ yield (site_id, address, port, hashfield, peer.reputation, int(peer.time_added), int(peer.time_found), int(peer.time_response), int(peer.connection_error))
+
+ def savePeers(self, site, spawn=False):
+ if spawn:
+ # Save peers every hour (+random some secs to not update very site at same time)
+ site.greenlet_manager.spawnLater(60 * 60 + random.randint(0, 60), self.savePeers, site, spawn=True)
+ if not site.peers:
+ site.log.debug("Peers not saved: No peers found")
+ return
+ s = time.time()
+ site_id = self.site_ids.get(site.address)
+ cur = self.getCursor()
+ try:
+ cur.execute("DELETE FROM peer WHERE site_id = :site_id", {"site_id": site_id})
+ cur.executemany(
+ "INSERT INTO peer (site_id, address, port, hashfield, reputation, time_added, time_found, time_response, connection_error) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
+ self.iteratePeers(site)
+ )
+ except Exception as err:
+ site.log.error("Save peer error: %s" % err)
+ site.log.debug("Peers saved in %.3fs" % (time.time() - s))
+
+ def initSite(self, site):
+ super(ContentDbPlugin, self).initSite(site)
+ site.greenlet_manager.spawnLater(0.5, self.loadPeers, site)
+ site.greenlet_manager.spawnLater(60*60, self.savePeers, site, spawn=True)
+
+ def saveAllPeers(self):
+ for site in list(self.sites.values()):
+ try:
+ self.savePeers(site)
+ except Exception as err:
+ site.log.error("Save peer error: %s" % err)
diff --git a/plugins/PeerDb/__init__.py b/plugins/PeerDb/__init__.py
new file mode 100644
index 00000000..bc8c93b9
--- /dev/null
+++ b/plugins/PeerDb/__init__.py
@@ -0,0 +1,2 @@
+from . import PeerDbPlugin
+
diff --git a/plugins/PeerDb/plugin_info.json b/plugins/PeerDb/plugin_info.json
new file mode 100644
index 00000000..b13915ff
--- /dev/null
+++ b/plugins/PeerDb/plugin_info.json
@@ -0,0 +1,5 @@
+{
+ "name": "PeerDb",
+ "description": "Save/restore peer list on client restart.",
+ "default": "enabled"
+}
\ No newline at end of file
diff --git a/plugins/Sidebar/ConsolePlugin.py b/plugins/Sidebar/ConsolePlugin.py
new file mode 100644
index 00000000..15f6a1ba
--- /dev/null
+++ b/plugins/Sidebar/ConsolePlugin.py
@@ -0,0 +1,101 @@
+import re
+import logging
+
+from Plugin import PluginManager
+from Config import config
+from Debug import Debug
+from util import SafeRe
+from util.Flag import flag
+
+
+class WsLogStreamer(logging.StreamHandler):
+ def __init__(self, stream_id, ui_websocket, filter):
+ self.stream_id = stream_id
+ self.ui_websocket = ui_websocket
+
+ if filter:
+ if not SafeRe.isSafePattern(filter):
+ raise Exception("Not a safe prex pattern")
+ self.filter_re = re.compile(".*" + filter)
+ else:
+ self.filter_re = None
+ return super(WsLogStreamer, self).__init__()
+
+ def emit(self, record):
+ if self.ui_websocket.ws.closed:
+ self.stop()
+ return
+ line = self.format(record)
+ if self.filter_re and not self.filter_re.match(line):
+ return False
+
+ self.ui_websocket.cmd("logLineAdd", {"stream_id": self.stream_id, "lines": [line]})
+
+ def stop(self):
+ logging.getLogger('').removeHandler(self)
+
+
+@PluginManager.registerTo("UiWebsocket")
+class UiWebsocketPlugin(object):
+ def __init__(self, *args, **kwargs):
+ self.log_streamers = {}
+ return super(UiWebsocketPlugin, self).__init__(*args, **kwargs)
+
+ @flag.no_multiuser
+ @flag.admin
+ def actionConsoleLogRead(self, to, filter=None, read_size=32 * 1024, limit=500):
+ log_file_path = "%s/debug.log" % config.log_dir
+ log_file = open(log_file_path, encoding="utf-8")
+ log_file.seek(0, 2)
+ end_pos = log_file.tell()
+ log_file.seek(max(0, end_pos - read_size))
+ if log_file.tell() != 0:
+ log_file.readline() # Partial line junk
+
+ pos_start = log_file.tell()
+ lines = []
+ if filter:
+ assert SafeRe.isSafePattern(filter)
+ filter_re = re.compile(".*" + filter)
+
+ last_match = False
+ for line in log_file:
+ if not line.startswith("[") and last_match: # Multi-line log entry
+ lines.append(line.replace(" ", " "))
+ continue
+
+ if filter and not filter_re.match(line):
+ last_match = False
+ continue
+ last_match = True
+ lines.append(line)
+
+ num_found = len(lines)
+ lines = lines[-limit:]
+
+ return {"lines": lines, "pos_end": log_file.tell(), "pos_start": pos_start, "num_found": num_found}
+
+ def addLogStreamer(self, stream_id, filter=None):
+ logger = WsLogStreamer(stream_id, self, filter)
+ logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s'))
+ logger.setLevel(logging.getLevelName("DEBUG"))
+
+ logging.getLogger('').addHandler(logger)
+ return logger
+
+ @flag.no_multiuser
+ @flag.admin
+ def actionConsoleLogStream(self, to, filter=None):
+ stream_id = to
+ self.log_streamers[stream_id] = self.addLogStreamer(stream_id, filter)
+ self.response(to, {"stream_id": stream_id})
+
+ @flag.no_multiuser
+ @flag.admin
+ def actionConsoleLogStreamRemove(self, to, stream_id):
+ try:
+ self.log_streamers[stream_id].stop()
+ del self.log_streamers[stream_id]
+ return "ok"
+ except Exception as err:
+ return {"error": Debug.formatException(err)}
diff --git a/plugins/Sidebar/SidebarPlugin.py b/plugins/Sidebar/SidebarPlugin.py
new file mode 100644
index 00000000..f5b40e2d
--- /dev/null
+++ b/plugins/Sidebar/SidebarPlugin.py
@@ -0,0 +1,892 @@
+import re
+import os
+import html
+import sys
+import math
+import time
+import json
+import io
+import urllib
+import urllib.parse
+
+import gevent
+
+import util
+from Config import config
+from Plugin import PluginManager
+from Debug import Debug
+from Translate import Translate
+from util import helper
+from util.Flag import flag
+from .ZipStream import ZipStream
+
+plugin_dir = os.path.dirname(__file__)
+media_dir = plugin_dir + "/media"
+
+loc_cache = {}
+if "_" not in locals():
+ _ = Translate(plugin_dir + "/languages/")
+
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+    # Inject our resources to end of original file streams
+    def actionUiMedia(self, path):
+        # Sidebar js/css is appended to the site-wide all.js / all.css bundles
+        if path == "/uimedia/all.js" or path == "/uimedia/all.css":
+            # First yield the original file and header
+            body_generator = super(UiRequestPlugin, self).actionUiMedia(path)
+            for part in body_generator:
+                yield part
+
+            # Append our media file to the end
+            ext = re.match(".*(js|css)$", path).group(1)
+            plugin_media_file = "%s/all.%s" % (media_dir, ext)
+            if config.debug:
+                # If debugging merge *.css to all.css and *.js to all.js
+                from Debug import DebugMedia
+                DebugMedia.merge(plugin_media_file)
+            if ext == "js":
+                # js goes through the translate helper before serving
+                yield _.translateData(open(plugin_media_file).read()).encode("utf8")
+            else:
+                for part in self.actionFile(plugin_media_file, send_header=False):
+                    yield part
+        elif path.startswith("/uimedia/globe/"):  # Serve WebGL globe files
+            file_name = re.match(".*/(.*)", path).group(1)
+            plugin_media_file = "%s_globe/%s" % (media_dir, file_name)
+            if config.debug and path.endswith("all.js"):
+                # If debugging merge *.css to all.css and *.js to all.js
+                from Debug import DebugMedia
+                DebugMedia.merge(plugin_media_file)
+            for part in self.actionFile(plugin_media_file):
+                yield part
+        else:
+            # Not a sidebar resource: fall back to the original handler
+            for part in super(UiRequestPlugin, self).actionUiMedia(path):
+                yield part
+
+    def actionZip(self):
+        # Download the whole site directory as a zip backup attachment
+        address = self.get["address"]
+        site = self.server.site_manager.get(address)
+        if not site:
+            return self.error404("Site not found")
+
+        title = site.content_manager.contents.get("content.json", {}).get("title", "")
+        filename = "%s-backup-%s.zip" % (title, time.strftime("%Y-%m-%d_%H_%M"))
+        filename_quoted = urllib.parse.quote(filename)
+        self.sendHeader(content_type="application/zip", extra_headers={'Content-Disposition': 'attachment; filename="%s"' % filename_quoted})
+
+        return self.streamZip(site.storage.getPath("."))
+
+    def streamZip(self, dir_path):
+        # Stream the archive chunk by chunk until ZipStream is exhausted
+        zs = ZipStream(dir_path)
+        while 1:
+            data = zs.read()
+            if not data:
+                break
+            yield data
+
+
+@PluginManager.registerTo("UiWebsocket")
+class UiWebsocketPlugin(object):
+ def sidebarRenderPeerStats(self, body, site):
+ # Peers by status
+ peers_total = len(site.peers)
+ peers_reachable = 0
+ peers_connectable = 0
+ peers_connected = 0
+ peers_failed = 0
+ # Peers by type
+ peers_by_type = {}
+
+ type_proper_names = {
+ 'ipv4': 'IPv4',
+ 'ipv6': 'IPv6',
+ 'onion': 'Onion',
+ 'unknown': 'Unknown'
+ }
+
+ type_defs = {
+ 'local-ipv4': {
+ 'order' : -21,
+ 'color' : 'yellow'
+ },
+ 'local-ipv6': {
+ 'order' : -20,
+ 'color' : 'orange'
+ },
+ 'ipv4': {
+ 'order' : -11,
+ 'color' : 'blue'
+ },
+ 'ipv6': {
+ 'order' : -10,
+ 'color' : 'darkblue'
+ },
+ 'unknown': {
+ 'order' : 10,
+ 'color' : 'red'
+ },
+ }
+
+ for peer in list(site.peers.values()):
+ # Peers by status
+ if peer.isConnected():
+ peers_connected += 1
+ elif peer.isConnectable():
+ peers_connectable += 1
+ elif peer.isReachable() and not peer.connection_error:
+ peers_reachable += 1
+ elif peer.isReachable() and peer.connection_error:
+ peers_failed += 1
+ # Peers by type
+ peer_type = peer.getIpType()
+ peer_readable_type = type_proper_names.get(peer_type, peer_type)
+ if helper.isPrivateIp(peer.ip):
+ peer_type = 'local-' + peer.getIpType()
+ peer_readable_type = 'Local ' + peer_readable_type
+ peers_by_type[peer_type] = peers_by_type.get(peer_type, {
+ 'type': peer_type,
+ 'readable_type': _[peer_readable_type],
+ 'order': type_defs.get(peer_type, {}).get('order', 0),
+ 'color': type_defs.get(peer_type, {}).get('color', 'purple'),
+ 'count': 0
+ })
+ peers_by_type[peer_type]['count'] += 1
+
+ ########################################################################
+
+ if peers_total:
+ percent_connected = float(peers_connected) / peers_total
+ percent_connectable = float(peers_connectable) / peers_total
+ percent_reachable = float(peers_reachable) / peers_total
+ percent_failed = float(peers_failed) / peers_total
+ percent_other = min(0.0, 1.0 - percent_connected - percent_connectable - percent_reachable - percent_failed)
+ else:
+ percent_connected = 0
+ percent_reachable = 0
+ percent_connectable = 0
+ percent_failed = 0
+ percent_other = 0
+
+ peer_ips = [peer.key for peer in site.getConnectablePeers(20, allow_private=False)]
+ peer_ips.sort(key=lambda peer_ip: ".onion:" in peer_ip)
+ copy_link = "http://127.0.0.1:43110/%s/?zeronet_peers=%s" % (
+ site.content_manager.contents.get("content.json", {}).get("domain", site.address),
+ ",".join(peer_ips)
+ )
+
+ body.append(_("""
+