2015-01-12 02:03:45 +01:00
|
|
|
'''
Minimal client for the BitTorrent UDP tracker protocol.

Based on the specification at http://bittorrent.org/beps/bep_0015.html
'''
|
2017-07-31 12:28:55 +02:00
|
|
|
import binascii
|
2015-01-12 02:03:45 +01:00
|
|
|
import random
|
|
|
|
import struct
|
|
|
|
import time
|
|
|
|
import socket
|
|
|
|
from collections import defaultdict
|
|
|
|
|
|
|
|
|
|
|
|
__version__ = '0.0.1'


# Action codes defined by BEP 15 (UDP tracker protocol). They appear in
# both request headers and response headers.
CONNECT = 0
ANNOUNCE = 1
SCRAPE = 2
ERROR = 3
|
|
|
|
|
|
|
|
|
|
|
|
class UdpTrackerClientException(Exception):
    '''Raised for client-side errors: bad arguments or protocol violations.'''
|
|
|
|
|
|
|
|
|
|
|
|
class UdpTrackerClient:
|
|
|
|
|
|
|
|
def __init__(self, host, port):
|
|
|
|
self.host = host
|
|
|
|
self.port = port
|
|
|
|
self.peer_port = 6881
|
|
|
|
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
|
|
|
self.conn_id = 0x41727101980
|
|
|
|
self.transactions = {}
|
|
|
|
self.peer_id = self._generate_peer_id()
|
Version 0.3.4, Rev656, CryptMessage plugin for AES and ECIES encryption, Added pyelliptic lib for OpenSSSL based encryption methods, Test CryptMessage plugin, Force reload content.json before signing and after write, Escaped Sql IN queries support, Test Sql parameter escaping, ui_websocket Test fixture, Plugin testing support, Always return websocket errors as dict, Wait for file on weboscket fileGet command if its already in bad_files queue, PushState and ReplaceState url manipulation support in wrapper API, Per auth-address localstorage, Longer timeout for udp tracker query
2015-12-10 21:36:20 +01:00
|
|
|
self.timeout = 9
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
def connect(self):
|
|
|
|
return self._send(CONNECT)
|
|
|
|
|
|
|
|
def announce(self, **kwargs):
|
|
|
|
if not kwargs:
|
|
|
|
raise UdpTrackerClientException('arguments missing')
|
|
|
|
args = {
|
|
|
|
'peer_id': self.peer_id,
|
|
|
|
'downloaded': 0,
|
|
|
|
'left': 0,
|
|
|
|
'uploaded': 0,
|
|
|
|
'event': 0,
|
|
|
|
'key': 0,
|
|
|
|
'num_want': 10,
|
|
|
|
'ip_address': 0,
|
|
|
|
'port': self.peer_port,
|
|
|
|
}
|
|
|
|
args.update(kwargs)
|
|
|
|
|
|
|
|
fields = 'info_hash peer_id downloaded left uploaded event ' \
|
|
|
|
'ip_address key num_want port'
|
|
|
|
|
|
|
|
# Check and raise if missing fields
|
|
|
|
self._check_fields(args, fields)
|
|
|
|
|
|
|
|
# Humans tend to use hex representations of the hash. Wasteful humans.
|
2019-03-16 02:06:28 +01:00
|
|
|
args['info_hash'] = args['info_hash']
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
values = [args[a] for a in fields.split()]
|
2019-03-16 02:06:28 +01:00
|
|
|
values[1] = values[1].encode("utf8")
|
2015-01-12 02:03:45 +01:00
|
|
|
payload = struct.pack('!20s20sQQQLLLLH', *values)
|
|
|
|
return self._send(ANNOUNCE, payload)
|
|
|
|
|
|
|
|
def scrape(self, info_hash_list):
|
|
|
|
if len(info_hash_list) > 74:
|
|
|
|
raise UdpTrackerClientException('Max info_hashes is 74')
|
|
|
|
|
|
|
|
payload = ''
|
|
|
|
for info_hash in info_hash_list:
|
|
|
|
payload += info_hash
|
|
|
|
|
|
|
|
trans = self._send(SCRAPE, payload)
|
|
|
|
trans['sent_hashes'] = info_hash_list
|
|
|
|
return trans
|
|
|
|
|
|
|
|
def poll_once(self):
|
|
|
|
self.sock.settimeout(self.timeout)
|
|
|
|
try:
|
|
|
|
response = self.sock.recv(10240)
|
|
|
|
except socket.timeout:
|
|
|
|
return
|
|
|
|
|
|
|
|
header = response[:8]
|
|
|
|
payload = response[8:]
|
|
|
|
action, trans_id = struct.unpack('!LL', header)
|
|
|
|
try:
|
|
|
|
trans = self.transactions[trans_id]
|
|
|
|
except KeyError:
|
|
|
|
self.error('transaction_id not found')
|
|
|
|
return
|
|
|
|
trans['response'] = self._process_response(action, payload, trans)
|
|
|
|
trans['completed'] = True
|
|
|
|
del self.transactions[trans_id]
|
|
|
|
return trans
|
|
|
|
|
|
|
|
def error(self, message):
|
2018-08-26 22:55:31 +02:00
|
|
|
raise Exception('error: {}'.format(message))
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
def _send(self, action, payload=None):
|
|
|
|
if not payload:
|
2019-03-16 02:06:28 +01:00
|
|
|
payload = b''
|
2015-01-12 02:03:45 +01:00
|
|
|
trans_id, header = self._request_header(action)
|
|
|
|
self.transactions[trans_id] = trans = {
|
|
|
|
'action': action,
|
|
|
|
'time': time.time(),
|
|
|
|
'payload': payload,
|
|
|
|
'completed': False,
|
|
|
|
}
|
2016-01-19 20:42:00 +01:00
|
|
|
self.sock.connect((self.host, self.port))
|
|
|
|
self.sock.send(header + payload)
|
2015-01-12 02:03:45 +01:00
|
|
|
return trans
|
|
|
|
|
|
|
|
def _request_header(self, action):
|
|
|
|
trans_id = random.randint(0, (1 << 32) - 1)
|
|
|
|
return trans_id, struct.pack('!QLL', self.conn_id, action, trans_id)
|
|
|
|
|
|
|
|
def _process_response(self, action, payload, trans):
|
|
|
|
if action == CONNECT:
|
|
|
|
return self._process_connect(payload, trans)
|
|
|
|
elif action == ANNOUNCE:
|
|
|
|
return self._process_announce(payload, trans)
|
|
|
|
elif action == SCRAPE:
|
|
|
|
return self._process_scrape(payload, trans)
|
|
|
|
elif action == ERROR:
|
2018-08-26 22:55:31 +02:00
|
|
|
return self._process_error(payload, trans)
|
2015-01-12 02:03:45 +01:00
|
|
|
else:
|
|
|
|
raise UdpTrackerClientException(
|
|
|
|
'Unknown action response: {}'.format(action))
|
|
|
|
|
|
|
|
def _process_connect(self, payload, trans):
|
|
|
|
self.conn_id = struct.unpack('!Q', payload)[0]
|
|
|
|
return self.conn_id
|
|
|
|
|
|
|
|
def _process_announce(self, payload, trans):
|
|
|
|
response = {}
|
|
|
|
|
|
|
|
info_struct = '!LLL'
|
|
|
|
info_size = struct.calcsize(info_struct)
|
|
|
|
info = payload[:info_size]
|
|
|
|
interval, leechers, seeders = struct.unpack(info_struct, info)
|
|
|
|
|
|
|
|
peer_data = payload[info_size:]
|
|
|
|
peer_struct = '!LH'
|
|
|
|
peer_size = struct.calcsize(peer_struct)
|
2019-03-16 02:06:28 +01:00
|
|
|
peer_count = int(len(peer_data) / peer_size)
|
2015-01-12 02:03:45 +01:00
|
|
|
peers = []
|
|
|
|
|
2019-03-16 02:06:28 +01:00
|
|
|
for peer_offset in range(peer_count):
|
2015-01-12 02:03:45 +01:00
|
|
|
off = peer_size * peer_offset
|
|
|
|
peer = peer_data[off:off + peer_size]
|
|
|
|
addr, port = struct.unpack(peer_struct, peer)
|
|
|
|
peers.append({
|
|
|
|
'addr': socket.inet_ntoa(struct.pack('!L', addr)),
|
|
|
|
'port': port,
|
|
|
|
})
|
|
|
|
|
|
|
|
return {
|
|
|
|
'interval': interval,
|
|
|
|
'leechers': leechers,
|
|
|
|
'seeders': seeders,
|
|
|
|
'peers': peers,
|
|
|
|
}
|
|
|
|
|
|
|
|
def _process_scrape(self, payload, trans):
|
|
|
|
info_struct = '!LLL'
|
|
|
|
info_size = struct.calcsize(info_struct)
|
|
|
|
info_count = len(payload) / info_size
|
|
|
|
hashes = trans['sent_hashes']
|
|
|
|
response = {}
|
2019-03-16 02:06:28 +01:00
|
|
|
for info_offset in range(info_count):
|
2015-01-12 02:03:45 +01:00
|
|
|
off = info_size * info_offset
|
|
|
|
info = payload[off:off + info_size]
|
|
|
|
seeders, completed, leechers = struct.unpack(info_struct, info)
|
|
|
|
response[hashes[info_offset]] = {
|
|
|
|
'seeders': seeders,
|
|
|
|
'completed': completed,
|
|
|
|
'leechers': leechers,
|
|
|
|
}
|
|
|
|
return response
|
|
|
|
|
|
|
|
def _process_error(self, payload, trans):
|
|
|
|
'''
|
|
|
|
I haven't seen this action type be sent from a tracker, but I've left
|
|
|
|
it here for the possibility.
|
|
|
|
'''
|
|
|
|
self.error(payload)
|
2018-08-26 22:55:31 +02:00
|
|
|
return False
|
2015-01-12 02:03:45 +01:00
|
|
|
|
|
|
|
def _generate_peer_id(self):
|
|
|
|
'''http://www.bittorrent.org/beps/bep_0020.html'''
|
|
|
|
peer_id = '-PU' + __version__.replace('.', '-') + '-'
|
|
|
|
remaining = 20 - len(peer_id)
|
2019-03-16 02:06:28 +01:00
|
|
|
numbers = [str(random.randint(0, 9)) for _ in range(remaining)]
|
2015-01-12 02:03:45 +01:00
|
|
|
peer_id += ''.join(numbers)
|
|
|
|
assert(len(peer_id) == 20)
|
|
|
|
return peer_id
|
|
|
|
|
|
|
|
def _check_fields(self, args, fields):
|
|
|
|
for f in fields:
|
|
|
|
try:
|
|
|
|
args.get(f)
|
|
|
|
except KeyError:
|
|
|
|
raise UdpTrackerClientException('field missing: {}'.format(f))
|
|
|
|
|