Merge remote-tracking branch 'upstream/master'

Felix Imobersteg 2015-05-31 16:25:49 +02:00
commit 9406945cf9
19 changed files with 179 additions and 105 deletions

View File

@@ -1,6 +1,6 @@
 # ZeroNet
-Decentralized websites using Bitcoin crypto and the BitTorrent network
+Decentralized websites using Bitcoin crypto and the BitTorrent network - http://zeronet.io
 ## Why?

View File

@@ -394,7 +394,7 @@ class UiRequestPlugin(object):
         schema = {
             "db_name": "TestDb",
-            "db_file": "data/benchmark.db",
+            "db_file": "%s/benchmark.db" % config.data_dir,
             "maps": {
                 ".*": {
                     "to_table": {
@@ -415,17 +415,17 @@ class UiRequestPlugin(object):
                 }
             }
-        if os.path.isfile("data/benchmark.db"): os.unlink("data/benchmark.db")
+        if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir)
         with benchmark("Open x 10", 0.13):
             for i in range(10):
-                db = Db(schema, "data/benchmark.db")
+                db = Db(schema, "%s/benchmark.db" % config.data_dir)
                 db.checkTables()
                 db.close()
                 yield "."
-        db = Db(schema, "data/benchmark.db")
+        db = Db(schema, "%s/benchmark.db" % config.data_dir)
         db.checkTables()
         import json
@@ -434,9 +434,9 @@ class UiRequestPlugin(object):
             data = {"test": []}
             for i in range(1000): # 1000 line of data
                 data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
-            json.dump(data, open("data/test_%s.json" % u, "w"))
-            db.loadJson("data/test_%s.json" % u)
-            os.unlink("data/test_%s.json" % u)
+            json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
+            db.loadJson("%s/test_%s.json" % (config.data_dir, u))
+            os.unlink("%s/test_%s.json" % (config.data_dir, u))
             yield "."
@@ -448,9 +448,9 @@ class UiRequestPlugin(object):
             data = {"test": []}
             for i in range(100): # 1000 line of data
                 data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
-            json.dump(data, open("data/test_%s.json" % u, "w"))
-            db.loadJson("data/test_%s.json" % u, cur=cur)
-            os.unlink("data/test_%s.json" % u)
+            json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
+            db.loadJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
+            os.unlink("%s/test_%s.json" % (config.data_dir, u))
             if u%10 == 0: yield "."
         cur.execute("COMMIT")
@@ -496,7 +496,7 @@ class UiRequestPlugin(object):
         db.close()
-        if os.path.isfile("data/benchmark.db"): os.unlink("data/benchmark.db")
+        if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir)
         gc.collect() # Implicit grabage collection

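The with benchmark("Open x 10", 0.13): blocks above time each step against an expected duration; the helper itself is defined elsewhere in this plugin file and is not shown in the diff. A rough, hypothetical equivalent as a context manager (name and output format assumed, only the call shape is taken from the hunks above):

# Hypothetical stand-in for the plugin's benchmark() helper, Python 2 style.
import time
from contextlib import contextmanager

@contextmanager
def benchmark(title, standard):
    start = time.time()
    yield                      # run the measured block
    taken = time.time() - start
    print "%s: %.3fs (expected around %.2fs)" % (title, taken, standard)
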
View File

@@ -11,7 +11,7 @@ log = logging.getLogger("DnschainPlugin")
 @PluginManager.registerTo("SiteManager")
 class SiteManagerPlugin(object):
-    dns_cache_path = "data/dns_cache.json"
+    dns_cache_path = "%s/dns_cache.json" % config.data_dir
     dns_cache = None
     # Checks if its a valid address

View File

@@ -4,7 +4,7 @@ import ConfigParser
 class Config(object):
     def __init__(self):
         self.version = "0.3.0"
-        self.rev = 193
+        self.rev = 196
         self.parser = self.createArguments()
         argv = sys.argv[:] # Copy command line arguments
         argv = self.parseConfig(argv) # Add arguments from config file
@@ -103,12 +103,16 @@ class Config(object):
         parser.add_argument('--debug', help='Debug mode', action='store_true')
         parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')

+        parser.add_argument('--config_file', help='Path of config file', default="zeronet.conf", metavar="path")
+        parser.add_argument('--data_dir', help='Path of data directory', default="data", metavar="path")
+        parser.add_argument('--log_dir', help='Path of logging directory', default="log", metavar="path")
+
         parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
         parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
         parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
         parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name')
         parser.add_argument('--homepage', help='Web interface Homepage', default='1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr', metavar='address')
-        parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size_limit')
+        parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, metavar='size')
         parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
         parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, type=int, metavar='port')
@@ -151,17 +155,19 @@ class Config(object):
         action = self.getAction(argv)
         if len(argv) == 1 or not action: # If no action specificed set the main action
             argv.append("main")
-        if "zeronet.py" in argv[0]:
-            self.arguments = self.parser.parse_args(argv[1:])
-        else: # Silent errors if not started with zeronet.py
-            self.arguments = self.parser.parse_args(argv[1:])
+        self.arguments = self.parser.parse_args(argv[1:])

     # Parse config file
     def parseConfig(self, argv):
-        if os.path.isfile("zeronet.conf"):
+        # Find config file path from parameters
+        config_file = "zeronet.conf"
+        if "--config_file" in argv:
+            config_file = argv[argv.index("--config_file")+1]
+        # Load config file
+        if os.path.isfile(config_file):
             config = ConfigParser.ConfigParser(allow_no_value=True)
-            config.read('zeronet.conf')
+            config.read(config_file)
             for section in config.sections():
                 for key, val in config.items(section):
                     if section != "global": # If not global prefix key with section

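The parseConfig() rewrite above lets the config file location itself come from the command line (--config_file), and every key found in the file becomes an equivalent command-line argument before argparse runs. A minimal standalone sketch of that mapping (not part of the commit; the file content below is made up), using the section-prefix rule from the hunk:

import ConfigParser, StringIO

# Hypothetical zeronet.conf: keys under [global] map to --key,
# keys under any other section are prefixed with the section name.
conf_text = "[global]\ndata_dir = zeronet-data\n\n[ui]\nip = 127.0.0.1\n"

config = ConfigParser.ConfigParser(allow_no_value=True)
config.readfp(StringIO.StringIO(conf_text))

argv = ["zeronet.py"]
for section in config.sections():
    for key, val in config.items(section):
        if section != "global":  # If not global prefix key with section
            key = section + "_" + key
        argv.append("--%s" % key)
        argv.append(val)

print argv  # ['zeronet.py', '--data_dir', 'zeronet-data', '--ui_ip', '127.0.0.1']
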
View File

@@ -35,7 +35,7 @@ class DebugReloader:
     def changed(self, evt):
-        if not evt.path or "data/" in evt.path or evt.path.endswith("pyc") or time.time()-self.last_chaged < 1: return False # Ignore *.pyc changes and no reload within 1 sec
+        if not evt.path or "%s/" % config.data_dir in evt.path or evt.path.endswith("pyc") or time.time()-self.last_chaged < 1: return False # Ignore *.pyc changes and no reload within 1 sec
         #logging.debug("Changed: %s" % evt)
         time.sleep(0.1) # Wait for lock release
         self.callback()

View File

@@ -154,7 +154,7 @@ class FileRequest(object):
         for peer in params["peers"]: # Add sent peers to site
             address = self.unpackAddress(peer)
             got_peer_keys.append("%s:%s" % address)
-            if (site.addPeer(*address)): added += 1
+            if site.addPeer(*address): added += 1
         # Send back peers that is not in the sent list and connectable (not port 0)
         packed_peers = [peer.packAddress() for peer in site.getConnectablePeers(params["need"], got_peer_keys)]
         if added:

View File

@@ -59,7 +59,7 @@ class Site:
     # Load site settings from data/sites.json
     def loadSettings(self):
-        sites_settings = json.load(open("data/sites.json"))
+        sites_settings = json.load(open("%s/sites.json" % config.data_dir))
         if self.address in sites_settings:
             self.settings = sites_settings[self.address]
         else:
@@ -73,9 +73,9 @@ class Site:
     # Save site settings to data/sites.json
     def saveSettings(self):
-        sites_settings = json.load(open("data/sites.json"))
+        sites_settings = json.load(open("%s/sites.json" % config.data_dir))
         sites_settings[self.address] = self.settings
-        open("data/sites.json", "w").write(json.dumps(sites_settings, indent=2, sort_keys=True))
+        open("%s/sites.json" % config.data_dir, "w").write(json.dumps(sites_settings, indent=2, sort_keys=True))
         return

View File

@@ -1,6 +1,7 @@
 import json, logging, time, re, os
 import gevent
 from Plugin import PluginManager
+from Config import config

 TRACKERS = [
     ("udp", "open.demonii.com", 1337),
@@ -33,8 +34,8 @@ class SiteManager(object):
         address_found = []
         added = 0
         # Load new adresses
-        for address in json.load(open("data/sites.json")):
-            if address not in self.sites and os.path.isfile("data/%s/content.json" % address):
+        for address in json.load(open("%s/sites.json" % config.data_dir)):
+            if address not in self.sites and os.path.isfile("%s/%s/content.json" % (config.data_dir, address)):
                 self.sites[address] = Site(address)
                 added += 1
                 address_found.append(address)

View File

@@ -2,12 +2,13 @@ import os, re, shutil, json, time, sqlite3
 import gevent.event
 from Db import Db
 from Debug import Debug
+from Config import config

 class SiteStorage:
     def __init__(self, site, allow_create=True):
         self.site = site
-        self.directory = "data/%s" % self.site.address # Site data diretory
+        self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
         self.log = site.log
         self.db = None # Db class
         self.db_checked = False # Checked db tables since startup

View File

@@ -120,11 +120,11 @@ class TestCase(unittest.TestCase):
     def testDb(self):
         print "Importing db..."
         from Db import Db
-        for db_path in [os.path.abspath("data/test/zeronet.db"), "data/test/zeronet.db"]:
+        for db_path in [os.path.abspath("%s/test/zeronet.db" % config.data_dir), "%s/test/zeronet.db" % config.data_dir]:
             print "Creating db using %s..." % db_path,
             schema = {
                 "db_name": "TestDb",
-                "db_file": "data/test/zeronet.db",
+                "db_file": "%s/test/zeronet.db" % config.data_dir,
                 "map": {
                     "data.json": {
                         "to_table": {
@@ -144,14 +144,14 @@ class TestCase(unittest.TestCase):
                     }
                 }
-            if os.path.isfile("data/test/zeronet.db"): os.unlink("data/test/zeronet.db")
-            db = Db(schema, "data/test/zeronet.db")
+            if os.path.isfile("%s/test/zeronet.db" % config.data_dir): os.unlink("%s/test/zeronet.db" % config.data_dir)
+            db = Db(schema, "%s/test/zeronet.db" % config.data_dir)
             db.checkTables()
             db.close()

             # Cleanup
-            os.unlink("data/test/zeronet.db")
-            os.rmdir("data/test/")
+            os.unlink("%s/test/zeronet.db" % config.data_dir)
+            os.rmdir("%s/test/" % config.data_dir)

     def testContentManagerIncludes(self):

View File

@@ -236,8 +236,8 @@ class UiRequest(object):
         if match: # Looks like a valid path
             address = match.group("address")
-            file_path = "data/%s/%s" % (address, match.group("inner_path"))
-            allowed_dir = os.path.abspath("data/%s" % address) # Only files within data/sitehash allowed
+            file_path = "%s/%s/%s" % (config.data_dir, address, match.group("inner_path"))
+            allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address)) # Only files within data/sitehash allowed
             data_dir = os.path.abspath("data") # No files from data/ allowed
             if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir) or allowed_dir == data_dir: # File not in allowed path
                 return self.error403()

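Only the location of the served files changes above; the sandboxing itself is still the check on the last lines of the hunk. A small standalone illustration of that check (not from the commit; the address is hypothetical and config.data_dir is taken as its default "data"):

import os

data_dir = "data"                                  # default of the new --data_dir option
address = "1ExampleSiteAddressXXXXXXXXXXXXXX"      # hypothetical site address
allowed_dir = os.path.abspath("%s/%s" % (data_dir, address))
root_dir = os.path.abspath(data_dir)

for inner_path in ("index.html", "../../users.json"):
    file_path = "%s/%s/%s" % (data_dir, address, inner_path)
    inside = (".." not in file_path and
              os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir) and
              allowed_dir != root_dir)
    print file_path, "->", "served" if inside else "403"
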
View File

@@ -1,6 +1,8 @@
 import logging, json, time
 from Crypt import CryptBitcoin
 from Plugin import PluginManager
+from Config import config

 @PluginManager.acceptPlugins
 class User(object):
@@ -22,13 +24,13 @@ class User(object):
     # Save to data/users.json
     def save(self):
-        users = json.load(open("data/users.json"))
+        users = json.load(open("%s/users.json" % config.data_dir))
         if not self.master_address in users: users[self.master_address] = {} # Create if not exits
         user_data = users[self.master_address]
         if self.master_seed: user_data["master_seed"] = self.master_seed
         user_data["sites"] = self.sites
         user_data["certs"] = self.certs
-        open("data/users.json", "w").write(json.dumps(users, indent=2, sort_keys=True))
+        open("%s/users.json" % config.data_dir, "w").write(json.dumps(users, indent=2, sort_keys=True))
         self.log.debug("Saved")

View File

@@ -1,6 +1,7 @@
 import json, logging, os
 from User import User
 from Plugin import PluginManager
+from Config import config

 @PluginManager.acceptPlugins
@@ -16,7 +17,7 @@ class UserManager(object):
         user_found = []
         added = 0
         # Load new users
-        for master_address, data in json.load(open("data/users.json")).items():
+        for master_address, data in json.load(open("%s/users.json" % config.data_dir)).items():
             if master_address not in self.users:
                 user = User(master_address, data=data)
                 self.users[master_address] = user

View File

@@ -40,7 +40,11 @@ class Worker:
             self.task = task
             site = task["site"]
             task["workers_num"] += 1
-            buff = self.peer.getFile(site.address, task["inner_path"])
+            try:
+                buff = self.peer.getFile(site.address, task["inner_path"])
+            except Exception, err:
+                self.manager.log.debug("%s: getFile error: %s" % (self.key, err))
+                buff = None
             if self.running == False: # Worker no longer needed or got killed
                 self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"]))
                 break

View File

@@ -74,9 +74,24 @@ def public_key_to_bc_address(public_key):
     h160 = hash_160(public_key)
     return hash_160_to_bc_address(h160)

+def encode(val, base, minlen=0):
+    base, minlen = int(base), int(minlen)
+    code_string = ''.join([chr(x) for x in range(256)])
+    result = ""
+    while val > 0:
+        result = code_string[val % base] + result
+        val //= base
+    return code_string[0] * max(minlen - len(result), 0) + result
+
+def num_to_var_int(x):
+    x = int(x)
+    if x < 253: return chr(x)
+    elif x < 65536: return chr(253)+encode(x, 256, 2)[::-1]
+    elif x < 4294967296: return chr(254) + encode(x, 256, 4)[::-1]
+    else: return chr(255) + encode(x, 256, 8)[::-1]
+
 def msg_magic(message):
-    #return "\x18Bitcoin Signed Message:\n" + chr( len(message) ) + message
-    return "\x18Bitcoin Signed Message:\n" + chr( len(message) ) + message
+    return "\x18Bitcoin Signed Message:\n" + num_to_var_int( len(message) ) + message

 def get_address(eckey):
     size = ssl.i2o_ECPublicKey (eckey, 0)

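The switch from chr() to num_to_var_int() matters once the signed message reaches 253 bytes: Bitcoin's signmessage format expects a variable-length integer prefix, and chr() can only produce a single byte (raising ValueError from 256 upward). A short illustration, not part of the commit, assuming it runs in the same module as the functions above:

message = "A" * 300                       # any message longer than 255 bytes
try:
    prefix = chr(len(message))            # old behaviour: breaks for long messages
except ValueError, err:
    print "chr() failed:", err

prefix = num_to_var_int(len(message))     # new behaviour: 0xfd marker + little-endian uint16 length
print repr(prefix)                        # '\xfd,\x01'  (0xfd, then 300 = 0x012c byte-reversed)
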
View File

@@ -10,6 +10,8 @@ import ctypes
 import ctypes.util
 import hashlib
 import base64
+import time
+import logging

 addrtype = 0

 class _OpenSSL:
@@ -17,6 +19,7 @@ class _OpenSSL:
     Wrapper for OpenSSL using ctypes
     """
     def __init__(self, library):
+        self.time_opened = time.time()
        """
        Build the wrapper
        """
@@ -172,14 +175,23 @@ class _OpenSSL:
         self.i2o_ECPublicKey.restype = ctypes.c_void_p
         self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p]

+        self.BN_CTX_free = self._lib.BN_CTX_free
+        self.BN_CTX_free.restype = None
+        self.BN_CTX_free.argtypes = [ctypes.c_void_p]
+
+        self.EC_POINT_free = self._lib.EC_POINT_free
+        self.EC_POINT_free.restype = None
+        self.EC_POINT_free.argtypes = [ctypes.c_void_p]

-try:
-    ssl = _OpenSSL("src/lib/opensslVerify/libeay32.dll")
-except:
-    ssl = _OpenSSL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32')
+def openLibrary():
+    global ssl
+    try:
+        ssl = _OpenSSL("src/lib/opensslVerify/libeay32.dll")
+    except:
+        ssl = _OpenSSL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or 'libeay32')
+
+openLibrary()

 openssl_version = "%.9X" % ssl._lib.SSLeay()

 NID_secp256k1 = 714
@@ -296,51 +308,58 @@ def SetCompactSignature(pkey, hash, signature):
 def ECDSA_SIG_recover_key_GFp(eckey, r, s, msg, msglen, recid, check):
     n = 0
     i = recid / 2
-    group = ssl.EC_KEY_get0_group(eckey)
-    ctx = ssl.BN_CTX_new()
-    ssl.BN_CTX_start(ctx)
-    order = ssl.BN_CTX_get(ctx)
-    ssl.EC_GROUP_get_order(group, order, ctx)
-    x = ssl.BN_CTX_get(ctx)
-    ssl.BN_copy(x, order);
-    ssl.BN_mul_word(x, i);
-    ssl.BN_add(x, x, r)
-    field = ssl.BN_CTX_get(ctx)
-    ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx)
-    if (ssl.BN_cmp(x, field) >= 0):
-        return False
-    R = ssl.EC_POINT_new(group)
-    ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)
-    if check:
-        O = ssl.EC_POINT_new(group)
-        ssl.EC_POINT_mul(group, O, None, R, order, ctx)
-        if ssl.EC_POINT_is_at_infinity(group, O):
-            return False
-    Q = ssl.EC_POINT_new(group)
-    n = ssl.EC_GROUP_get_degree(group)
-    e = ssl.BN_CTX_get(ctx)
-    ssl.BN_bin2bn(msg, msglen, e)
-    if 8 * msglen > n: ssl.BN_rshift(e, e, 8 - (n & 7))
-    zero = ssl.BN_CTX_get(ctx)
-    ssl.BN_set_word(zero, 0)
-    ssl.BN_mod_sub(e, zero, e, order, ctx)
-    rr = ssl.BN_CTX_get(ctx);
-    ssl.BN_mod_inverse(rr, r, order, ctx)
-    sor = ssl.BN_CTX_get(ctx)
-    ssl.BN_mod_mul(sor, s, rr, order, ctx)
-    eor = ssl.BN_CTX_get(ctx)
-    ssl.BN_mod_mul(eor, e, rr, order, ctx)
-    ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx)
-    ssl.EC_KEY_set_public_key(eckey, Q)
-    return eckey
+    ctx = R = O = Q = None
+    try:
+        group = ssl.EC_KEY_get0_group(eckey)
+        ctx = ssl.BN_CTX_new()
+        ssl.BN_CTX_start(ctx)
+        order = ssl.BN_CTX_get(ctx)
+        ssl.EC_GROUP_get_order(group, order, ctx)
+        x = ssl.BN_CTX_get(ctx)
+        ssl.BN_copy(x, order);
+        ssl.BN_mul_word(x, i);
+        ssl.BN_add(x, x, r)
+        field = ssl.BN_CTX_get(ctx)
+        ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx)
+        if (ssl.BN_cmp(x, field) >= 0):
+            return False
+        R = ssl.EC_POINT_new(group)
+        ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)
+        if check:
+            O = ssl.EC_POINT_new(group)
+            ssl.EC_POINT_mul(group, O, None, R, order, ctx)
+            if ssl.EC_POINT_is_at_infinity(group, O):
+                return False
+        Q = ssl.EC_POINT_new(group)
+        n = ssl.EC_GROUP_get_degree(group)
+        e = ssl.BN_CTX_get(ctx)
+        ssl.BN_bin2bn(msg, msglen, e)
+        if 8 * msglen > n: ssl.BN_rshift(e, e, 8 - (n & 7))
+        zero = ssl.BN_CTX_get(ctx)
+        ssl.BN_set_word(zero, 0)
+        ssl.BN_mod_sub(e, zero, e, order, ctx)
+        rr = ssl.BN_CTX_get(ctx);
+        ssl.BN_mod_inverse(rr, r, order, ctx)
+        sor = ssl.BN_CTX_get(ctx)
+        ssl.BN_mod_mul(sor, s, rr, order, ctx)
+        eor = ssl.BN_CTX_get(ctx)
+        ssl.BN_mod_mul(eor, e, rr, order, ctx)
+        ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx)
+        ssl.EC_KEY_set_public_key(eckey, Q)
+        return eckey
+    finally:
+        if ctx: ssl.BN_CTX_free(ctx)
+        if R: ssl.EC_POINT_free(R)
+        if O: ssl.EC_POINT_free(O)
+        if Q: ssl.EC_POINT_free(Q)

-def close():
+def closeLibrary():
     import _ctypes
     if "FreeLibrary" in dir(_ctypes):
         _ctypes.FreeLibrary(ssl._lib._handle)
@@ -354,7 +373,12 @@ def getMessagePubkey(message, sig):
     size = ssl.i2o_ECPublicKey (eckey, 0)
     mb = ctypes.create_string_buffer (size)
     ssl.i2o_ECPublicKey (eckey, ctypes.byref (ctypes.pointer (mb)))
-    return mb.raw
+    pub = mb.raw
+    if time.time()-ssl.time_opened>60*5: # Reopen every 5 min
+        logging.debug("Reopening OpenSSL...")
+        closeLibrary()
+        openLibrary()
+    return pub

 def test():
     sign = "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ="

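Two leak-related fixes are visible above: ECDSA_SIG_recover_key_GFp() now frees its BN_CTX and EC_POINT objects in a finally block, and getMessagePubkey() drops and reloads the whole shared library every five minutes through closeLibrary()/openLibrary(). The reopen idea, reduced to its bare shape (a sketch only, not this module's API; all names below are made up):

import time

REOPEN_AFTER = 60 * 5  # seconds, matching the "Reopen every 5 min" comment above

def call_with_periodic_reload(handle, work, close_fn, open_fn):
    # Run one unit of work, then reload the ctypes-wrapped library if the
    # current handle has been open long enough to have accumulated leaks.
    result = work(handle)
    if time.time() - handle.time_opened > REOPEN_AFTER:
        close_fn()
        open_fn()
    return result
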
View File

@@ -1,16 +1,35 @@
 import opensslVerify, gevent, time
-from gevent import monkey; monkey.patch_all(thread=False, ssl=False)
+from gevent import monkey
+monkey.patch_all(thread=False, ssl=False)

 def test():
     data = "A"*1024
     sign = "G2Jo8dDa+jqvJipft9E3kfrAxjESWLBpVtuGIiEBCD/UUyHmRMYNqnlWeOiaHHpja5LOP+U5CanRALfOjCSYIa8="
-    for i in range(5*1000):
+    for i in range(2*1000):
         if i%1000 == 0:
             print i, len(data)
-            data += data+"A"
+            #data += data+"A"
         time.sleep(0)
         pub = opensslVerify.getMessagePubkey(data, sign)
     print repr(pub), len(data)

-gevent.joinall([gevent.spawn(test), gevent.spawn(test)])
+while 1:
+    s = time.time()
+    gevent.joinall([gevent.spawn(test), gevent.spawn(test)])
+    try:
+        import psutil, os
+        process = psutil.Process(os.getpid())
+        print "Mem:", process.get_memory_info()[0] / float(2 ** 20)
+    except:
+        pass
+    raw_input("finished, in %.2fs, check memory usage" % (time.time()-s))
+    opensslVerify.close()
+    opensslVerify.open()
+    try:
+        import psutil, os
+        process = psutil.Process(os.getpid())
+        print "Mem:", process.get_memory_info()[0] / float(2 ** 20)
+    except:
+        pass
+    raw_input("closed and openssl, check memory again, press enter to start again")

View File

@@ -1,22 +1,23 @@
 import os, sys
 update_after_shutdown = False # If set True then update and restart zeronet after main loop ended

-# Create necessary files and dirs
-if not os.path.isdir("log"): os.mkdir("log")
-if not os.path.isdir("data"): os.mkdir("data")
-if not os.path.isfile("data/sites.json"): open("data/sites.json", "w").write("{}")
-if not os.path.isfile("data/users.json"): open("data/users.json", "w").write("{}")
-
 # Load config
 from Config import config

+# Create necessary files and dirs
+if not os.path.isdir(config.log_dir): os.mkdir(config.log_dir)
+if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir)
+if not os.path.isfile("%s/sites.json" % config.data_dir): open("%s/sites.json" % config.data_dir, "w").write("{}")
+if not os.path.isfile("%s/users.json" % config.data_dir): open("%s/users.json" % config.data_dir, "w").write("{}")
+
 # Setup logging
 import logging
 if config.action == "main":
-    if os.path.isfile("log/debug.log"): # Simple logrotate
-        if os.path.isfile("log/debug-last.log"): os.unlink("log/debug-last.log")
-        os.rename("log/debug.log", "log/debug-last.log")
-    logging.basicConfig(format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', level=logging.DEBUG, filename="log/debug.log")
+    if os.path.isfile("%s/debug.log" % config.log_dir): # Simple logrotate
+        if os.path.isfile("%s/debug-last.log" % config.log_dir): os.unlink("%s/debug-last.log" % config.log_dir)
+        os.rename("%s/debug.log" % config.log_dir, "%s/debug-last.log" % config.log_dir)
+    logging.basicConfig(format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', level=logging.DEBUG, filename="%s/debug.log" % config.log_dir)
 else:
     logging.basicConfig(level=logging.DEBUG, stream=open(os.devnull,"w")) # No file logging if action is not main
@@ -99,8 +100,8 @@ class Actions:
         logging.info("Creating directory structure...")
         from Site import Site
-        os.mkdir("data/%s" % address)
-        open("data/%s/index.html" % address, "w").write("Hello %s!" % address)
+        os.mkdir("%s/%s" % (config.data_dir, address))
+        open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address)
         logging.info("Creating content.json...")
         site = Site(address)

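With the bootstrap moved after the config import, the data and log locations become configurable; on first run main.py creates both directories plus empty sites.json and users.json inside the chosen data_dir. A quick way to exercise the new options (not part of the commit; the paths are placeholders):

import subprocess

# Equivalent to: python zeronet.py --config_file zeronet.conf --data_dir zeronet-data --log_dir zeronet-log
subprocess.call([
    "python", "zeronet.py",
    "--config_file", "zeronet.conf",   # placeholder paths, any writable locations work
    "--data_dir", "zeronet-data",
    "--log_dir", "zeronet-log",
])
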
View File

@@ -13,7 +13,7 @@ def main():
     # Try cleanup openssl
     try:
         if "lib.opensslVerify" in sys.modules:
-            sys.modules["lib.opensslVerify"].opensslVerify.close()
+            sys.modules["lib.opensslVerify"].opensslVerify.closeLibrary()
     except Exception, err:
         print "Error closing openssl", err