import time
import re

from util import helper


# Special sqlite cursor
class DbCursor:
|
|
|
|
|
2019-12-17 14:28:52 +01:00
|
|
|
def __init__(self, db):
|
2015-07-12 20:36:46 +02:00
|
|
|
self.db = db
|
|
|
|
self.logging = False
|
|
|
|
|
2019-01-26 20:40:34 +01:00
|
|
|
def quoteValue(self, value):
|
|
|
|
if type(value) is int:
|
|
|
|
return str(value)
|
|
|
|
else:
|
|
|
|
return "'%s'" % value.replace("'", "''")
|
|
|
|
|
2019-03-16 02:40:32 +01:00
|
|
|
def parseQuery(self, query, params):
|
|
|
|
query_type = query.split(" ", 1)[0].upper()
|
Version 0.3.5, Rev830, Full Tor mode support with hidden services, Onion stats in Sidebar, GeoDB download fix using Tor, Gray out disabled sites in Stats page, Tor hidden service status in stat page, Benchmark sha256, Skyts tracker out expodie in, 2 new tracker using ZeroNet protocol, Keep SSL cert option between restarts, SSL Certificate pinning support for connections, Site lock support for connections, Certificate pinned connections using implicit SSL, Flood protection whitelist support, Foreign keys support for DB layer, Not support for SQL query helper, 0 length file get bugfix, Pex onion address support, Faster port testing, Faster uPnP port opening, Need connections more often on owned sites, Delay ZeroHello startup message if port check or Tor manager not ready yet, Use lockfiles to avoid double start, Save original socket on proxy monkey patching to get ability to connect localhost directly, Handle atomic write errors, Broken gevent https workaround helper, Rsa crypt functions, Plugin to Bootstrap using ZeroNet protocol
2016-01-05 00:20:52 +01:00
|
|
|
if isinstance(params, dict) and "?" in query: # Make easier select and insert by allowing dict params
|
2019-03-16 02:40:32 +01:00
|
|
|
if query_type in ("SELECT", "DELETE", "UPDATE"):
|
Version 0.3.4, Rev656, CryptMessage plugin for AES and ECIES encryption, Added pyelliptic lib for OpenSSSL based encryption methods, Test CryptMessage plugin, Force reload content.json before signing and after write, Escaped Sql IN queries support, Test Sql parameter escaping, ui_websocket Test fixture, Plugin testing support, Always return websocket errors as dict, Wait for file on weboscket fileGet command if its already in bad_files queue, PushState and ReplaceState url manipulation support in wrapper API, Per auth-address localstorage, Longer timeout for udp tracker query
2015-12-10 21:36:20 +01:00
|
|
|
# Convert param dict to SELECT * FROM table WHERE key = ? AND key2 = ? format
|
|
|
|
query_wheres = []
|
|
|
|
values = []
|
|
|
|
for key, value in params.items():
|
|
|
|
if type(value) is list:
|
Version 0.3.5, Rev830, Full Tor mode support with hidden services, Onion stats in Sidebar, GeoDB download fix using Tor, Gray out disabled sites in Stats page, Tor hidden service status in stat page, Benchmark sha256, Skyts tracker out expodie in, 2 new tracker using ZeroNet protocol, Keep SSL cert option between restarts, SSL Certificate pinning support for connections, Site lock support for connections, Certificate pinned connections using implicit SSL, Flood protection whitelist support, Foreign keys support for DB layer, Not support for SQL query helper, 0 length file get bugfix, Pex onion address support, Faster port testing, Faster uPnP port opening, Need connections more often on owned sites, Delay ZeroHello startup message if port check or Tor manager not ready yet, Use lockfiles to avoid double start, Save original socket on proxy monkey patching to get ability to connect localhost directly, Handle atomic write errors, Broken gevent https workaround helper, Rsa crypt functions, Plugin to Bootstrap using ZeroNet protocol
2016-01-05 00:20:52 +01:00
|
|
|
if key.startswith("not__"):
|
2019-01-26 20:40:34 +01:00
|
|
|
field = key.replace("not__", "")
|
|
|
|
operator = "NOT IN"
|
|
|
|
else:
|
|
|
|
field = key
|
|
|
|
operator = "IN"
|
|
|
|
if len(value) > 100:
|
|
|
|
# Embed values in query to avoid "too many SQL variables" error
|
2019-04-29 16:44:13 +02:00
|
|
|
query_values = ",".join(map(helper.sqlquote, value))
|
Version 0.3.5, Rev830, Full Tor mode support with hidden services, Onion stats in Sidebar, GeoDB download fix using Tor, Gray out disabled sites in Stats page, Tor hidden service status in stat page, Benchmark sha256, Skyts tracker out expodie in, 2 new tracker using ZeroNet protocol, Keep SSL cert option between restarts, SSL Certificate pinning support for connections, Site lock support for connections, Certificate pinned connections using implicit SSL, Flood protection whitelist support, Foreign keys support for DB layer, Not support for SQL query helper, 0 length file get bugfix, Pex onion address support, Faster port testing, Faster uPnP port opening, Need connections more often on owned sites, Delay ZeroHello startup message if port check or Tor manager not ready yet, Use lockfiles to avoid double start, Save original socket on proxy monkey patching to get ability to connect localhost directly, Handle atomic write errors, Broken gevent https workaround helper, Rsa crypt functions, Plugin to Bootstrap using ZeroNet protocol
2016-01-05 00:20:52 +01:00
|
|
|
else:
|
2019-01-26 20:40:34 +01:00
|
|
|
query_values = ",".join(["?"] * len(value))
|
|
|
|
values += value
|
2019-03-16 02:40:32 +01:00
|
|
|
query_wheres.append(
|
|
|
|
"%s %s (%s)" %
|
2019-01-26 20:40:34 +01:00
|
|
|
(field, operator, query_values)
|
|
|
|
)
|
Version 0.3.4, Rev656, CryptMessage plugin for AES and ECIES encryption, Added pyelliptic lib for OpenSSSL based encryption methods, Test CryptMessage plugin, Force reload content.json before signing and after write, Escaped Sql IN queries support, Test Sql parameter escaping, ui_websocket Test fixture, Plugin testing support, Always return websocket errors as dict, Wait for file on weboscket fileGet command if its already in bad_files queue, PushState and ReplaceState url manipulation support in wrapper API, Per auth-address localstorage, Longer timeout for udp tracker query
2015-12-10 21:36:20 +01:00
|
|
|
else:
|
Version 0.3.5, Rev830, Full Tor mode support with hidden services, Onion stats in Sidebar, GeoDB download fix using Tor, Gray out disabled sites in Stats page, Tor hidden service status in stat page, Benchmark sha256, Skyts tracker out expodie in, 2 new tracker using ZeroNet protocol, Keep SSL cert option between restarts, SSL Certificate pinning support for connections, Site lock support for connections, Certificate pinned connections using implicit SSL, Flood protection whitelist support, Foreign keys support for DB layer, Not support for SQL query helper, 0 length file get bugfix, Pex onion address support, Faster port testing, Faster uPnP port opening, Need connections more often on owned sites, Delay ZeroHello startup message if port check or Tor manager not ready yet, Use lockfiles to avoid double start, Save original socket on proxy monkey patching to get ability to connect localhost directly, Handle atomic write errors, Broken gevent https workaround helper, Rsa crypt functions, Plugin to Bootstrap using ZeroNet protocol
2016-01-05 00:20:52 +01:00
|
|
|
if key.startswith("not__"):
|
2016-11-07 23:09:06 +01:00
|
|
|
query_wheres.append(key.replace("not__", "") + " != ?")
|
2019-07-10 03:14:09 +02:00
|
|
|
elif key.endswith("__like"):
|
|
|
|
query_wheres.append(key.replace("__like", "") + " LIKE ?")
|
2017-10-04 13:10:41 +02:00
|
|
|
elif key.endswith(">"):
|
|
|
|
query_wheres.append(key.replace(">", "") + " > ?")
|
|
|
|
elif key.endswith("<"):
|
|
|
|
query_wheres.append(key.replace("<", "") + " < ?")
|
Version 0.3.5, Rev830, Full Tor mode support with hidden services, Onion stats in Sidebar, GeoDB download fix using Tor, Gray out disabled sites in Stats page, Tor hidden service status in stat page, Benchmark sha256, Skyts tracker out expodie in, 2 new tracker using ZeroNet protocol, Keep SSL cert option between restarts, SSL Certificate pinning support for connections, Site lock support for connections, Certificate pinned connections using implicit SSL, Flood protection whitelist support, Foreign keys support for DB layer, Not support for SQL query helper, 0 length file get bugfix, Pex onion address support, Faster port testing, Faster uPnP port opening, Need connections more often on owned sites, Delay ZeroHello startup message if port check or Tor manager not ready yet, Use lockfiles to avoid double start, Save original socket on proxy monkey patching to get ability to connect localhost directly, Handle atomic write errors, Broken gevent https workaround helper, Rsa crypt functions, Plugin to Bootstrap using ZeroNet protocol
2016-01-05 00:20:52 +01:00
|
|
|
else:
|
2016-11-07 23:09:06 +01:00
|
|
|
query_wheres.append(key + " = ?")
|
Version 0.3.4, Rev656, CryptMessage plugin for AES and ECIES encryption, Added pyelliptic lib for OpenSSSL based encryption methods, Test CryptMessage plugin, Force reload content.json before signing and after write, Escaped Sql IN queries support, Test Sql parameter escaping, ui_websocket Test fixture, Plugin testing support, Always return websocket errors as dict, Wait for file on weboscket fileGet command if its already in bad_files queue, PushState and ReplaceState url manipulation support in wrapper API, Per auth-address localstorage, Longer timeout for udp tracker query
2015-12-10 21:36:20 +01:00
|
|
|
values.append(value)
|
|
|
|
wheres = " AND ".join(query_wheres)
|
2017-10-04 13:10:26 +02:00
|
|
|
if wheres == "":
|
|
|
|
wheres = "1"
|
|
|
|
query = re.sub("(.*)[?]", "\\1 %s" % wheres, query) # Replace the last ?
|
Version 0.3.4, Rev656, CryptMessage plugin for AES and ECIES encryption, Added pyelliptic lib for OpenSSSL based encryption methods, Test CryptMessage plugin, Force reload content.json before signing and after write, Escaped Sql IN queries support, Test Sql parameter escaping, ui_websocket Test fixture, Plugin testing support, Always return websocket errors as dict, Wait for file on weboscket fileGet command if its already in bad_files queue, PushState and ReplaceState url manipulation support in wrapper API, Per auth-address localstorage, Longer timeout for udp tracker query
2015-12-10 21:36:20 +01:00
|
|
|
params = values
|
2015-07-12 20:36:46 +02:00
|
|
|
else:
|
|
|
|
# Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format
|
|
|
|
keys = ", ".join(params.keys())
|
|
|
|
values = ", ".join(['?' for key in params.keys()])
|
Version 0.3.6, Rev879, Fix sidebar error on description missing, New trayicon, New favicon, Disable some functions on MultiUser proxies, New homepage, Replace only the last ? in SQL queries, Alwaays grant ADMIN permission to homepage site, Announce before publish if no peers, configSet, serverShutdown, ADMIN WebsocketAPI command, Stop Tor client before updating, Ignore peer ip packing error, Ignore db files from git, Fix safari ajax error when UiPassword enabled
2016-02-02 11:40:45 +01:00
|
|
|
keysvalues = "(%s) VALUES (%s)" % (keys, values)
|
|
|
|
query = re.sub("(.*)[?]", "\\1%s" % keysvalues, query) # Replace the last ?
|
2015-07-12 20:36:46 +02:00
|
|
|
params = tuple(params.values())
|
2017-12-30 08:06:05 +01:00
|
|
|
elif isinstance(params, dict) and ":" in query:
|
|
|
|
new_params = dict()
|
|
|
|
values = []
|
|
|
|
for key, value in params.items():
|
|
|
|
if type(value) is list:
|
|
|
|
for idx, val in enumerate(value):
|
2017-12-30 10:06:07 +01:00
|
|
|
new_params[key + "__" + str(idx)] = val
|
2017-12-30 08:06:05 +01:00
|
|
|
|
2017-12-30 10:06:07 +01:00
|
|
|
new_names = [":" + key + "__" + str(idx) for idx in range(len(value))]
|
2018-01-05 20:22:25 +01:00
|
|
|
query = re.sub(r":" + re.escape(key) + r"([)\s]|$)", "(%s)%s" % (", ".join(new_names), r"\1"), query)
|
2017-12-30 08:06:05 +01:00
|
|
|
else:
|
|
|
|
new_params[key] = value
|
|
|
|
|
|
|
|
params = new_params
|
2019-03-16 02:40:32 +01:00
|
|
|
return query, params
|
|
|
|
|
|
|
|
def execute(self, query, params=None):
|
|
|
|
query = query.strip()
|
2019-11-30 02:11:11 +01:00
|
|
|
while self.db.progress_sleeping or self.db.commiting:
|
2019-03-18 03:36:12 +01:00
|
|
|
time.sleep(0.1)
|
2017-12-30 08:06:05 +01:00
|
|
|
|
2019-03-18 03:36:12 +01:00
|
|
|
self.db.last_query_time = time.time()
|
2019-03-16 02:40:32 +01:00
|
|
|
|
|
|
|
query, params = self.parseQuery(query, params)
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2019-12-17 14:28:52 +01:00
|
|
|
cursor = self.db.getConn().cursor()
|
|
|
|
self.db.cursors.add(cursor)
|
2019-12-18 16:43:46 +01:00
|
|
|
if self.db.lock.locked():
|
|
|
|
self.db.log.debug("Locked for %.3fs" % (time.time() - self.db.lock.time_lock))
|
|
|
|
|
|
|
|
try:
|
|
|
|
s = time.time()
|
|
|
|
self.db.lock.acquire(True)
|
2020-01-22 16:36:33 +01:00
|
|
|
if query.upper().strip("; ") == "VACUUM":
|
|
|
|
self.db.commit("vacuum called")
|
2019-12-18 16:43:46 +01:00
|
|
|
if params:
|
|
|
|
res = cursor.execute(query, params)
|
|
|
|
else:
|
|
|
|
res = cursor.execute(query)
|
|
|
|
finally:
|
|
|
|
self.db.lock.release()
|
2019-11-27 03:07:08 +01:00
|
|
|
|
2019-12-17 14:28:52 +01:00
|
|
|
taken_query = time.time() - s
|
2019-12-31 12:45:36 +01:00
|
|
|
if self.logging or taken_query > 1:
|
2019-12-17 14:28:52 +01:00
|
|
|
if params: # Query has parameters
|
|
|
|
self.db.log.debug("Query: " + query + " " + str(params) + " (Done in %.4f)" % (time.time() - s))
|
2019-11-23 13:22:36 +01:00
|
|
|
else:
|
2019-12-17 14:28:52 +01:00
|
|
|
self.db.log.debug("Query: " + query + " (Done in %.4f)" % (time.time() - s))
|
2015-07-12 20:36:46 +02:00
|
|
|
|
|
|
|
# Log query stats
|
|
|
|
if self.db.collect_stats:
|
|
|
|
if query not in self.db.query_stats:
|
|
|
|
self.db.query_stats[query] = {"call": 0, "time": 0.0}
|
|
|
|
self.db.query_stats[query]["call"] += 1
|
|
|
|
self.db.query_stats[query]["time"] += time.time() - s
|
|
|
|
|
2019-11-27 03:07:08 +01:00
|
|
|
query_type = query.split(" ", 1)[0].upper()
|
|
|
|
is_update_query = query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"]
|
|
|
|
if not self.db.need_commit and is_update_query:
|
|
|
|
self.db.need_commit = True
|
|
|
|
|
|
|
|
if is_update_query:
|
|
|
|
return cursor
|
|
|
|
else:
|
|
|
|
return res
|
|
|
|
|
|
|
|
def executemany(self, query, params):
|
2019-11-30 02:11:11 +01:00
|
|
|
while self.db.progress_sleeping or self.db.commiting:
|
2019-11-27 03:07:08 +01:00
|
|
|
time.sleep(0.1)
|
|
|
|
|
|
|
|
self.db.last_query_time = time.time()
|
|
|
|
|
|
|
|
s = time.time()
|
2019-12-17 14:28:52 +01:00
|
|
|
cursor = self.db.getConn().cursor()
|
|
|
|
self.db.cursors.add(cursor)
|
2019-11-27 03:07:08 +01:00
|
|
|
|
|
|
|
try:
|
|
|
|
self.db.lock.acquire(True)
|
|
|
|
cursor.executemany(query, params)
|
|
|
|
finally:
|
|
|
|
self.db.lock.release()
|
|
|
|
|
2019-11-30 02:10:11 +01:00
|
|
|
taken_query = time.time() - s
|
|
|
|
if self.logging or taken_query > 0.1:
|
2019-12-04 17:15:08 +01:00
|
|
|
self.db.log.debug("Execute many: %s (Done in %.4f)" % (query, taken_query))
|
2019-03-16 02:40:32 +01:00
|
|
|
|
2019-11-30 02:10:40 +01:00
|
|
|
self.db.need_commit = True
|
|
|
|
|
2019-11-27 03:07:08 +01:00
|
|
|
return cursor
|
2015-07-12 20:36:46 +02:00
|
|
|
|
2016-11-07 23:08:57 +01:00
|
|
|
# Creates on updates a database row without incrementing the rowid
|
|
|
|
def insertOrUpdate(self, table, query_sets, query_wheres, oninsert={}):
|
|
|
|
sql_sets = ["%s = :%s" % (key, key) for key in query_sets.keys()]
|
|
|
|
sql_wheres = ["%s = :%s" % (key, key) for key in query_wheres.keys()]
|
|
|
|
|
|
|
|
params = query_sets
|
|
|
|
params.update(query_wheres)
|
2019-11-27 03:07:08 +01:00
|
|
|
res = self.execute(
|
2016-11-07 23:08:57 +01:00
|
|
|
"UPDATE %s SET %s WHERE %s" % (table, ", ".join(sql_sets), " AND ".join(sql_wheres)),
|
|
|
|
params
|
|
|
|
)
|
2019-11-27 03:07:08 +01:00
|
|
|
if res.rowcount == 0:
|
2016-11-07 23:08:57 +01:00
|
|
|
params.update(oninsert) # Add insert-only fields
|
|
|
|
self.execute("INSERT INTO %s ?" % table, params)
|
|
|
|
|
2015-07-12 20:36:46 +02:00
|
|
|
# Create new table
|
|
|
|
# Return: True on success
|
|
|
|
def createTable(self, table, cols):
|
|
|
|
# TODO: Check current structure
|
|
|
|
self.execute("DROP TABLE IF EXISTS %s" % table)
|
|
|
|
col_definitions = []
|
|
|
|
for col_name, col_type in cols:
|
|
|
|
col_definitions.append("%s %s" % (col_name, col_type))
|
|
|
|
|
|
|
|
self.execute("CREATE TABLE %s (%s)" % (table, ",".join(col_definitions)))
|
|
|
|
return True
|
|
|
|
|
|
|
|
# Create indexes on table
|
|
|
|
# Return: True on success
|
|
|
|
def createIndexes(self, table, indexes):
|
|
|
|
for index in indexes:
|
2018-12-15 17:48:19 +01:00
|
|
|
if not index.strip().upper().startswith("CREATE"):
|
|
|
|
self.db.log.error("Index command should start with CREATE: %s" % index)
|
|
|
|
continue
|
2015-07-12 20:36:46 +02:00
|
|
|
self.execute(index)
|
|
|
|
|
|
|
|
# Create table if not exist
|
|
|
|
# Return: True if updated
|
|
|
|
def needTable(self, table, cols, indexes=None, version=1):
|
|
|
|
current_version = self.db.getTableVersion(table)
|
|
|
|
if int(current_version) < int(version): # Table need update or not extis
|
2019-10-16 15:43:28 +02:00
|
|
|
self.db.log.debug("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version))
|
2015-07-12 20:36:46 +02:00
|
|
|
self.createTable(table, cols)
|
|
|
|
if indexes:
|
|
|
|
self.createIndexes(table, indexes)
|
|
|
|
self.execute(
|
|
|
|
"INSERT OR REPLACE INTO keyvalue ?",
|
|
|
|
{"json_id": 0, "key": "table.%s.version" % table, "value": version}
|
|
|
|
)
|
|
|
|
return True
|
|
|
|
else: # Not changed
|
|
|
|
return False
|
|
|
|
|
|
|
|
# Get or create a row for json file
|
|
|
|
# Return: The database row
|
|
|
|
def getJsonRow(self, file_path):
|
|
|
|
directory, file_name = re.match("^(.*?)/*([^/]*)$", file_path).groups()
|
|
|
|
if self.db.schema["version"] == 1:
|
2016-08-10 12:46:27 +02:00
|
|
|
# One path field
|
2015-07-12 20:36:46 +02:00
|
|
|
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
|
|
|
|
row = res.fetchone()
|
|
|
|
if not row: # No row yet, create it
|
|
|
|
self.execute("INSERT INTO json ?", {"path": file_path})
|
|
|
|
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"path": file_path})
|
|
|
|
row = res.fetchone()
|
2016-08-10 12:46:27 +02:00
|
|
|
elif self.db.schema["version"] == 2:
|
|
|
|
# Separate directory, file_name (easier join)
|
2015-07-12 20:36:46 +02:00
|
|
|
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
|
|
|
|
row = res.fetchone()
|
|
|
|
if not row: # No row yet, create it
|
|
|
|
self.execute("INSERT INTO json ?", {"directory": directory, "file_name": file_name})
|
|
|
|
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"directory": directory, "file_name": file_name})
|
|
|
|
row = res.fetchone()
|
2016-08-10 12:46:27 +02:00
|
|
|
elif self.db.schema["version"] == 3:
|
|
|
|
# Separate site, directory, file_name (for merger sites)
|
|
|
|
site_address, directory = re.match("^([^/]*)/(.*)$", directory).groups()
|
|
|
|
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name})
|
|
|
|
row = res.fetchone()
|
|
|
|
if not row: # No row yet, create it
|
|
|
|
self.execute("INSERT INTO json ?", {"site": site_address, "directory": directory, "file_name": file_name})
|
|
|
|
res = self.execute("SELECT * FROM json WHERE ? LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name})
|
|
|
|
row = res.fetchone()
|
2017-07-18 20:53:52 +02:00
|
|
|
else:
|
|
|
|
raise Exception("Dbschema version %s not supported" % self.db.schema.get("version"))
|
2015-07-12 20:36:46 +02:00
|
|
|
return row
|
|
|
|
|
|
|
|
def close(self):
|
2019-11-27 03:07:08 +01:00
|
|
|
pass
|