# ZeroNet/src/Peer/Peer.py
import logging
import time

import gevent
from cStringIO import StringIO

from Debug import Debug
from Config import config
from util import helper
from PeerHashfield import PeerHashfield

# tempfile is only needed when downloads are buffered to disk
if config.use_tempfiles:
    import tempfile
# Communicate remote peers
class Peer(object):
    """A remote peer of a site.

    Wraps one Connection and tracks the peer's health (connection_error,
    hash_failed), response times, download statistics and the peer's
    optional-files hashfield.
    """
    __slots__ = (
        "ip", "port", "site", "key", "connection", "time_found", "time_response", "time_hashfield", "time_added",
        "time_my_hashfield_sent", "last_ping", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time"
    )

    def __init__(self, ip, port, site=None):
        self.ip = ip
        self.port = port
        self.site = site
        self.key = "%s:%s" % (ip, port)

        self.connection = None
        self.hashfield = PeerHashfield()  # Got optional files hash_id
        self.time_hashfield = None  # Last time peer's hashfield downloaded
        self.time_my_hashfield_sent = None  # Last time my hashfield sent to peer
        self.time_found = time.time()  # Time of last found in the torrent tracker
        self.time_response = None  # Time of last successful response from peer
        self.time_added = time.time()
        self.last_ping = None  # Last response time for ping

        self.connection_error = 0  # Series of connection error
        self.hash_failed = 0  # Number of bad files from peer
        self.download_bytes = 0  # Bytes downloaded
        self.download_time = 0  # Time spent to download

    def log(self, text):
        # Prefer the site's logger when this peer is bound to a site
        if self.site:
            self.site.log.debug("%s:%s %s" % (self.ip, self.port, text))
        else:
            logging.debug("%s:%s %s" % (self.ip, self.port, text))

    # Connect to host
    def connect(self, connection=None):
        if self.connection:
            self.log("Getting connection (Closing %s)..." % self.connection)
            self.connection.close()
        else:
            self.log("Getting connection...")

        if connection:  # Connection specified
            self.connection = connection
        else:  # Try to find from connection pool or create new connection
            self.connection = None
            try:
                self.connection = self.site.connection_server.getConnection(self.ip, self.port)
            except Exception as err:  # "as" form: valid since Python 2.6, forward-compatible
                self.onConnectionError()
                self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" %
                         (Debug.formatException(err), self.connection_error, self.hash_failed))
                self.connection = None

    # Check if we have connection to peer
    def findConnection(self):
        if self.connection and self.connection.connected:  # We have connection to peer
            return self.connection
        else:  # Try to find from other sites connections
            # create=False: only reuse an existing pooled connection, never dial
            self.connection = self.site.connection_server.getConnection(self.ip, self.port, create=False)
        return self.connection

    def __str__(self):
        return "Peer:%-12s" % self.ip

    def __repr__(self):
        return "<%s>" % self.__str__()

    def packMyAddress(self):
        # 6-byte packed ip:port, suitable for pex/tracker messages
        return helper.packAddress(self.ip, self.port)

    # Found a peer on tracker
    def found(self):
        self.time_found = time.time()

    # Send a command to peer and return response value
    def request(self, cmd, params=None, stream_to=None):
        # FIX: default was a shared mutable dict (params={}); use a None sentinel
        if params is None:
            params = {}
        if not self.connection or self.connection.closed:
            self.connect()
            if not self.connection:
                self.onConnectionError()
                return None  # Connection failed

        for retry in range(0, 3):  # Retry 3 times
            try:
                res = self.connection.request(cmd, params, stream_to)
                if not res:
                    raise Exception("Send error")
                if "error" in res:
                    self.log("%s error: %s" % (cmd, res["error"]))
                    self.onConnectionError()
                else:  # Successful request, reset connection error num
                    self.connection_error = 0
                self.time_response = time.time()
                return res
            except Exception as err:
                if type(err).__name__ == "Notify":  # Greenlet killed by worker
                    self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd))
                    break
                else:
                    self.onConnectionError()
                    self.log(
                        "%s (connection_error: %s, hash_failed: %s, retry: %s)" %
                        (Debug.formatException(err), self.connection_error, self.hash_failed, retry)
                    )
                    time.sleep(1 * retry)  # Grow the backoff with each retry
                    self.connect()
        return None  # Failed after 3 retries

    # Get a file content from peer
    # Return: file-like buffer on success, False on error
    def getFile(self, site, inner_path):
        # Use streamFile if client supports it
        if config.stream_downloads and self.connection and self.connection.handshake and self.connection.handshake["rev"] > 310:
            return self.streamFile(site, inner_path)

        location = 0
        if config.use_tempfiles:
            # Spills to disk above 16k to keep memory usage bounded
            buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b')
        else:
            buff = StringIO()

        s = time.time()
        while True:  # Read in 512k parts
            res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location})

            if not res or "body" not in res:  # Error
                return False

            buff.write(res["body"])
            res["body"] = None  # Save memory
            if res["location"] == res["size"]:  # End of file
                break
            else:
                location = res["location"]

        self.download_bytes += res["location"]
        self.download_time += (time.time() - s)
        self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + res["location"]
        buff.seek(0)
        return buff

    # Download file out of msgpack context to save memory and cpu
    # Return: file-like buffer on success, False on error
    def streamFile(self, site, inner_path):
        location = 0
        if config.use_tempfiles:
            buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b')
        else:
            buff = StringIO()

        s = time.time()
        while True:  # Read in 512k parts
            res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location}, stream_to=buff)

            if not res:  # Error
                self.log("Invalid response: %s" % res)
                return False

            if res["location"] == res["size"]:  # End of file
                break
            else:
                location = res["location"]

        self.download_bytes += res["location"]
        self.download_time += (time.time() - s)
        self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + res["location"]
        buff.seek(0)
        return buff

    # Send a ping request
    # Return: response time in seconds, or None on failure
    def ping(self):
        response_time = None
        for retry in range(1, 3):  # Retry 2 times
            s = time.time()
            with gevent.Timeout(10.0, False):  # 10 sec timeout, don't raise exception
                res = self.request("ping")

                if res and "body" in res and res["body"] == "Pong!":
                    response_time = time.time() - s
                    break  # All fine, exit from for loop
            # Timeout reached or bad response
            self.onConnectionError()
            self.connect()
            time.sleep(1)

        if response_time:
            self.log("Ping: %.3f" % response_time)
        else:
            self.log("Ping failed")
        self.last_ping = response_time
        return response_time

    # Request peer exchange from peer
    # Return: number of added peers, or False on error
    def pex(self, site=None, need_num=5):
        if not site:
            site = self.site  # If no site defined request peers for this site

        # give him/her 5 connectible peers
        # NOTE(review): peers are taken from self.site even when a different
        # `site` was passed in — looks suspicious, confirm intent before changing
        packed_peers = [peer.packMyAddress() for peer in self.site.getConnectablePeers(5)]
        res = self.request("pex", {"site": site.address, "peers": packed_peers, "need": need_num})
        if not res or "error" in res:
            return False
        added = 0
        for peer in res.get("peers", []):
            address = helper.unpackAddress(peer)
            if site.addPeer(*address):
                added += 1
        if added:
            self.log("Added peers using pex: %s" % added)
        return added

    # List modified files since the date
    # Return: {inner_path: modification date,...}
    def listModified(self, since):
        return self.request("listModified", {"since": since, "site": self.site.address})

    # Download the peer's hashfield of optional files
    # Return: the hashfield on success, False if throttled or on error
    def updateHashfield(self, force=False):
        # Don't update hashfield again in 15 min (unless forced)
        # FIX: original tested "> 60 * 15", which inverted the throttle:
        # it re-downloaded constantly within 15 min and never afterwards
        if self.time_hashfield and time.time() - self.time_hashfield < 60 * 15 and not force:
            return False

        self.time_hashfield = time.time()
        res = self.request("getHashfield", {"site": self.site.address})
        if not res or "error" in res:
            return False
        self.hashfield.replaceFromString(res["hashfield_raw"])

        return self.hashfield

    # Find peers for hashids
    # Return: {hash1: ["ip:port", "ip:port",...],...}
    def findHashIds(self, hash_ids):
        res = self.request("findHashIds", {"site": self.site.address, "hash_ids": hash_ids})
        if not res or "error" in res:
            return False
        # .items() + list comprehension instead of iteritems()/map():
        # identical result, also works on Python 3
        return {key: [helper.unpackAddress(peer) for peer in val] for key, val in res["peers"].items()}

    # Send my hashfield to peer
    # Return: True if sent
    def sendMyHashfield(self):
        if self.connection and self.connection.handshake.get("rev", 0) < 510:
            return False  # Not supported
        if self.time_my_hashfield_sent and self.site.content_manager.hashfield.time_changed <= self.time_my_hashfield_sent:
            return False  # Peer already has the latest hashfield

        res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tostring()})
        if not res or "error" in res:
            return False
        else:
            self.time_my_hashfield_sent = time.time()
            return True

    # Stop and remove from site
    def remove(self):
        self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed))
        if self.site and self.key in self.site.peers:
            del self.site.peers[self.key]
        if self.connection:
            self.connection.close()

    # - EVENTS -

    # On connection error
    def onConnectionError(self):
        self.connection_error += 1
        if self.connection_error >= 3:  # Dead peer
            self.remove()

    # Done working with peer
    def onWorkerDone(self):
        pass