Download resuming and proper filename support (bugs 19 and 57)

This patch finishes off the proper filename support, so the
"experimental_file_naming" configuration option is removed.

Also add download resuming based on this new code. The
resuming feature currently works like this: the partial
files of all active and queued downloads are kept when
gPodder is closed, and at the next startup gPodder asks the
user if she wants to continue downloading them (if not, the
partial files are deleted).
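
For illustration, the core of the new resume code (see
DownloadURLOpener.retrieve_resume further below) is an HTTP Range
request: if a partial file already exists, only the missing bytes
are requested and appended to it. A minimal, self-contained Python 2
sketch of that idea follows; the function name and parameters are
illustrative and not part of gPodder:

import os
import urllib2

def resume_download(url, filename):
    """Download url to filename, resuming if a partial file exists."""
    offset = 0
    if os.path.exists(filename):
        offset = os.path.getsize(filename)
    request = urllib2.Request(url)
    if offset > 0:
        # Only ask for the bytes we do not have yet; a server that
        # supports resuming replies with "206 Partial Content".
        request.add_header('Range', 'bytes=%d-' % offset)
    response = urllib2.urlopen(request)
    if offset > 0 and response.getcode() == 206:
        out = open(filename, 'ab')  # append to the partial file
    else:
        out = open(filename, 'wb')  # server ignored the Range header
    while True:
        block = response.read(8192)
        if not block:
            break
        out.write(block)
    out.close()
    response.close()

The startup side boils down to globbing the download directory for
"*.partial" files (see find_partial_files below) and matching each
basename, minus the suffix, against the episodes' stored filenames.
A condensed sketch of that discovery step, again with illustrative
names:

import glob
import os

def find_resumable(download_dir, known_filenames):
    """Return the episode filenames that have a matching *.partial file."""
    resumable = []
    for partial in glob.glob(os.path.join(download_dir, '*', '*.partial')):
        wanted = os.path.basename(partial)[:-len('.partial')]
        if wanted in known_filenames:
            resumable.append(wanted)
    return resumable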

These two changes could well break with some unusual feeds
(for proper filenames) and servers (for resuming).

Report any bugs that you encounter. Enjoy! :)
Thomas Perl 2009-02-09 23:26:47 +01:00
parent 8a2fdb1b4d
commit bb59cebe63
8 changed files with 316 additions and 59 deletions

View File

@ -1439,7 +1439,7 @@
<property name="update_policy">GTK_UPDATE_ALWAYS</property>
<property name="snap_to_ticks">False</property>
<property name="wrap">False</property>
<property name="adjustment">3 1 10 1 0 0</property>
<property name="adjustment">3 0 10 1 0 0</property>
</widget>
<packing>
<property name="left_attach">6</property>

View File

@ -116,7 +116,6 @@ gPodderSettings = {
'custom_player_coverart_size' : (int, 176),
'custom_player_coverart_name' : (str, 'folder.jpg'),
'custom_player_coverart_format' : (str, 'JPEG'),
'experimental_file_naming': (bool, False),
'podcast_list_icon_size': (int, 32),
'cmd_all_downloads_complete': (str, ''),
'cmd_download_complete': (str, ''),
@ -127,6 +126,7 @@ gPodderSettings = {
'enable_html_shownotes': (bool, True),
'maemo_enable_gestures': (bool, False),
'sync_disks_after_transfer': (bool, True),
'resume_ask_every_episode': (bool, False),
# Hide the cover/pill from the podcast sidebar when it gets too small
'podcast_sidebar_save_space': (bool, False),

View File

@ -27,6 +27,7 @@
from gpodder.liblogger import log
from gpodder.libgpodder import gl
from gpodder.dbsqlite import db
from gpodder import util
from gpodder import services
from gpodder import resolver
@ -78,6 +79,77 @@ class DownloadURLOpener(urllib.FancyURLopener):
fp.close()
raise gPodderDownloadHTTPError(url, errcode, errmsg)
# The following is based on Python's urllib.py "URLopener.retrieve"
# Also based on http://mail.python.org/pipermail/python-list/2001-October/110069.html
def http_error_206(self, url, fp, errcode, errmsg, headers, data=None):
# The next line is taken from urllib's URLopener.open_http
# method, at the end after the line "if errcode == 200:"
return urllib.addinfourl(fp, headers, 'http:' + url)
def retrieve_resume(self, url, filename, reporthook=None, data=None):
"""retrieve_resume(url) returns (filename, headers) for a local object
or (tempfilename, headers) for a remote object.
The filename argument is REQUIRED (no tempfile creation code here!)
Additionally resumes a download if the local filename exists"""
current_size = 0
tfp = None
if os.path.exists(filename):
try:
current_size = os.path.getsize(filename)
tfp = open(filename, 'ab')
#If the file exists, then only download the remainder
self.addheader('Range', 'bytes=%s-' % (current_size))
except:
log('Cannot open file for resuming: %s', filename, sender=self, traceback=True)
tfp = None
current_size = 0
if tfp is None:
tfp = open(filename, 'wb')
url = urllib.unwrap(urllib.toBytes(url))
fp = self.open(url, data)
headers = fp.info()
# gPodder TODO: we can get the real url via fp.geturl() here
# (if anybody wants to fix filenames in the future)
result = filename, headers
bs = 1024*8
size = -1
read = current_size
blocknum = int(current_size/bs)
if reporthook:
if "content-length" in headers:
size = int(headers["Content-Length"]) + current_size
reporthook(blocknum, bs, size)
while 1:
block = fp.read(bs)
if block == "":
break
read += len(block)
tfp.write(block)
blocknum += 1
if reporthook:
reporthook(blocknum, bs, size)
fp.close()
tfp.close()
del fp
del tfp
# raise exception if actual size does not match content-length header
if size >= 0 and read < size:
raise ContentTooShortError("retrieval incomplete: got only %i out "
"of %i bytes" % (read, size), result)
return result
# end code based on urllib.py
def prompt_user_passwd( self, host, realm):
if self.channel.username or self.channel.password:
log( 'Authenticating as "%s" to "%s" for realm "%s".', self.channel.username, host, realm, sender = self)
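A note on the hunk above: DownloadURLOpener's default error handler
(partially visible at the top of the hunk) raises gPodderDownloadHTTPError
for unexpected status codes, so the "206 Partial Content" reply to a
Range request would otherwise be treated as a failure. The http_error_206
override simply wraps the already-open response and hands it back, so
open() behaves just as it does for a 200. A stripped-down, self-contained
illustration of the same idea; class name, byte offset and URL are
placeholders:

import urllib

class ResumeURLOpener(urllib.FancyURLopener):
    # Like the class above, turn unexpected HTTP status codes into
    # hard errors ...
    def http_error_default(self, url, fp, errcode, errmsg, headers):
        fp.close()
        raise IOError('HTTP error %d: %s' % (errcode, errmsg))

    # ... but let "206 Partial Content" (the reply to a Range request)
    # through as a successful response.
    def http_error_206(self, url, fp, errcode, errmsg, headers, data=None):
        return urllib.addinfourl(fp, headers, 'http:' + url)

opener = ResumeURLOpener()
opener.addheader('Range', 'bytes=1024-')  # resume after the first KiB
response = opener.open('http://example.com/episode.mp3')
data = response.read()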
@ -103,13 +175,17 @@ class DownloadThread(threading.Thread):
self.notification = notification
self.url = self.episode.url
self.filename = self.episode.local_filename()
self.filename = self.episode.local_filename(create=True)
# Commit the database, so we won't lose the (possibly created) filename
db.commit()
self.tempname = self.filename + '.partial'
# Make an educated guess about the total file size
self.total_size = self.episode.length
self.cancelled = False
self.keep_files = False
self.start_time = 0.0
self.speed = _('Queued')
self.speed_value = 0
@ -122,8 +198,9 @@ class DownloadThread(threading.Thread):
self.limit_rate = gl.config.limit_rate
self.start_blocks = 0
def cancel( self):
def cancel(self, keep_files=False):
self.cancelled = True
self.keep_files = keep_files
def status_updated( self, count, blockSize, totalSize):
if totalSize:
@ -154,7 +231,8 @@ class DownloadThread(threading.Thread):
self.last_update = time.time()
if self.cancelled:
util.delete_file( self.tempname)
if not self.keep_files:
util.delete_file(self.tempname)
raise DownloadCancelledException()
def calculate_speed( self, count, blockSize):
@ -203,6 +281,21 @@ class DownloadThread(threading.Thread):
self.download_id = services.download_status_manager.reserve_download_id()
services.download_status_manager.register_download_id( self.download_id, self)
if os.path.exists(self.tempname):
try:
already_downloaded = os.path.getsize(self.tempname)
if self.total_size > 0:
self.progress = already_downloaded/self.total_size
if already_downloaded > 0:
self.speed = _('Queued (partial)')
except:
pass
else:
# "touch self.tempname", so we also get partial
# files for resuming when the file is queued
open(self.tempname, 'w').close()
# Initial status update
services.download_status_manager.update_status( self.download_id, episode = self.episode.title, url = self.episode.url, speed = self.speed, progress = self.progress)
@ -210,16 +303,19 @@ class DownloadThread(threading.Thread):
try:
try:
if self.cancelled:
# Remove the partial file in case we do
# not want to keep it (e.g. user cancelled)
if not self.keep_files:
util.delete_file(self.tempname)
return
util.delete_file( self.tempname)
(unused, headers) = self.downloader.retrieve( resolver.get_real_download_url(self.url), self.tempname, reporthook = self.status_updated)
(unused, headers) = self.downloader.retrieve_resume(resolver.get_real_download_url(self.url), self.tempname, reporthook=self.status_updated)
if 'content-type' in headers and headers['content-type'] != self.episode.mimetype:
log('Correcting mime type: %s => %s', self.episode.mimetype, headers['content-type'])
self.episode.mimetype = headers['content-type']
# File names are constructed with regard to the mime type.
self.filename = self.episode.local_filename()
self.filename = self.episode.local_filename(create=True, force_update=True)
shutil.move( self.tempname, self.filename)
# Get the _real_ filesize once we actually have the file
@ -242,16 +338,18 @@ class DownloadThread(threading.Thread):
services.download_status_manager.s_release( acquired)
except DownloadCancelledException:
log('Download has been cancelled: %s', self.episode.title, traceback=None, sender=self)
if not self.keep_files:
util.delete_file(self.tempname)
except IOError, ioe:
if self.notification is not None:
title = ioe.strerror
message = _('An error happened while trying to download <b>%s</b>.') % ( saxutils.escape( self.episode.title), )
message = _('An error happened while trying to download <b>%s</b>. Please try again later.') % ( saxutils.escape( self.episode.title), )
self.notification( message, title)
log( 'Error "%s" while downloading "%s": %s', ioe.strerror, self.episode.title, ioe.filename, sender = self)
except gPodderDownloadHTTPError, gdhe:
if self.notification is not None:
title = gdhe.error_message
message = _('An error (HTTP %d) happened while trying to download <b>%s</b>.') % ( gdhe.error_code, saxutils.escape( self.episode.title), )
message = _('An error (HTTP %d) happened while trying to download <b>%s</b>. You can try to resume the download later.') % ( gdhe.error_code, saxutils.escape( self.episode.title), )
self.notification( message, title)
log( 'HTTP error %s while downloading "%s": %s', gdhe.error_code, self.episode.title, gdhe.error_message, sender=self)
except:

View File

@ -690,7 +690,51 @@ class gPodder(GladeWidget):
self.update_feed_cache(force_update=gl.config.update_on_startup)
# Clean up old, orphaned download files
gl.clean_up_downloads(delete_partial=True)
partial_files = gl.find_partial_files()
resumable_episodes = []
if len(partial_files) > 0:
for f in partial_files:
correct_name = os.path.basename(f)[:-len('.partial')] # strip ".partial"
log('Searching episode for file: %s', correct_name, sender=self)
found_episode = False
for c in self.channels:
for e in c.get_all_episodes():
if e.filename == correct_name:
log('Found episode: %s', e.title, sender=self)
resumable_episodes.append(e)
found_episode = True
if found_episode:
break
if found_episode:
break
def remove_partial_file(episode):
fn = episode.local_filename(create=False)
if fn is not None:
util.delete_file(fn+'.partial')
if len(resumable_episodes):
if gl.config.resume_ask_every_episode:
gPodderEpisodeSelector(title = _('Resume downloads'), instructions = _('There are unfinished downloads from your last session. Pick the ones you want to resume.'), \
episodes = resumable_episodes, \
stock_ok_button = 'gpodder-download', callback = self.download_episode_list, remove_callback=remove_partial_file)
else:
if len(resumable_episodes) == 1:
question = _('There is one partially downloaded episode. Do you want to continue downloading it?')
else:
question = _('There are %d partially downloaded episodes. Do you want to continue downloading them?') % (len(resumable_episodes))
if self.show_confirmation(question, _('Resume downloads from last session')):
self.download_episode_list(resumable_episodes)
else:
for episode in resumable_episodes:
remove_partial_file(episode)
gl.clean_up_downloads(delete_partial=False)
else:
gl.clean_up_downloads(delete_partial=True)
# Start the auto-update procedure
self.auto_update_procedure(first_run=True)
@ -987,13 +1031,21 @@ class gPodder(GladeWidget):
def save_episode_as_file( self, url, *args):
episode = self.active_channel.find_episode(url)
folder = self.folder_for_saving_episodes
(result, folder) = self.show_copy_dialog(src_filename=episode.local_filename(), dst_filename=episode.sync_filename(), dst_directory=folder)
self.folder_for_saving_episodes = folder
if episode.was_downloaded(and_exists=True):
folder = self.folder_for_saving_episodes
copy_from = episode.local_filename(create=False)
assert copy_from is not None
(result, folder) = self.show_copy_dialog(src_filename=copy_from, dst_filename=episode.sync_filename(), dst_directory=folder)
self.folder_for_saving_episodes = folder
def copy_episode_bluetooth(self, url, *args):
episode = self.active_channel.find_episode(url)
filename = episode.local_filename()
if not episode.was_downloaded(and_exists=True):
log('Cannot copy episode via bluetooth (does not exist!)', sender=self)
filename = episode.local_filename(create=False)
assert filename is not None
if gl.config.bluetooth_use_device_address:
device = gl.config.bluetooth_device_address
@ -1344,7 +1396,6 @@ class gPodder(GladeWidget):
for path in paths:
url = model.get_value( model.get_iter( path), 0)
local_filename = model.get_value( model.get_iter( path), 8)
episode = podcastItem.load(url, self.active_channel)
@ -1877,7 +1928,7 @@ class gPodder(GladeWidget):
title = _('Quit gPodder')
if downloading:
message = _('You are downloading episodes. If you close gPodder now, the downloads will be aborted.')
message = _('You are downloading episodes. You can resume downloads the next time you start gPodder. Do you want to quit now?')
else:
message = _('Do you really want to quit gPodder now?')
@ -1911,7 +1962,7 @@ class gPodder(GladeWidget):
else:
self.show_message(_('Please check your permissions and free disk space.'), _('Error saving podcast list'))
services.download_status_manager.cancel_all()
services.download_status_manager.cancel_all(keep_files=True)
self.gPodder.hide()
while gtk.events_pending():
gtk.main_iteration(False)
@ -2097,7 +2148,6 @@ class gPodder(GladeWidget):
services.download_status_manager.start_batch_mode()
for episode in episodes:
log('Downloading episode: %s', episode.title, sender = self)
filename = episode.local_filename()
if not episode.was_downloaded(and_exists=True) and not services.download_status_manager.is_download_in_progress( episode.url):
download.DownloadThread(episode.channel, episode, self.notification).start()
services.download_status_manager.end_batch_mode()
@ -2218,7 +2268,9 @@ class gPodder(GladeWidget):
free_space = device.get_free_space()
for episode in episodes:
if not device.episode_on_device(episode) and not (sync_all_episodes and gl.config.only_sync_not_played and episode.is_played):
total_size += util.calculate_size(str(episode.local_filename()))
filename = episode.local_filename(create=False)
if filename is not None:
total_size += util.calculate_size(str(filename))
if total_size > free_space:
# can be negative because of the 10 MiB for reserved for the iTunesDB
@ -2751,9 +2803,7 @@ class gPodder(GladeWidget):
self.on_sync_to_ipod_activate(widget, episodes)
elif do_playback:
for episode in episodes:
# Make sure to mark the episode as downloaded
if os.path.exists(episode.local_filename()):
episode.channel.addDownloadedItem(episode)
if episode.was_downloaded(and_exists=True):
self.playback_episode(episode)
elif gl.config.enable_streaming:
self.playback_episode(episode, stream=True)
@ -2879,7 +2929,7 @@ class gPodder(GladeWidget):
# now, clear local db cache so we can re-read it
self.updateComboBox()
except:
log( 'Error while deleting (some) downloads.')
log( 'Error while deleting (some) downloads.', traceback=True, sender=self)
# only delete partial files if we do not have any downloads in progress
delete_partial = not services.download_status_manager.has_items()
@ -3607,7 +3657,7 @@ class gPodderEpisode(GladeWidget):
else:
self.download_progress.hide_all()
self.btnCancel.hide_all()
if os.path.exists(self.episode.local_filename()):
if self.episode.was_downloaded(and_exists=True):
if self.episode.file_type() in ('audio', 'video'):
self.btnPlay.set_label(gtk.STOCK_MEDIA_PLAY)
else:

View File

@ -287,6 +287,9 @@ class gPodderLib(object):
def format_filesize(self, bytesize, digits=2):
return util.format_filesize(bytesize, self.config.use_si_units, digits)
def find_partial_files(self):
return glob.glob(os.path.join(self.downloaddir, '*', '*.partial'))
def clean_up_downloads(self, delete_partial=False):
# Clean up temporary files left behind by old gPodder versions
temporary_files = glob.glob('%s/*/.tmp-*' % self.downloaddir)
@ -349,7 +352,8 @@ class gPodderLib(object):
db.save_episode(episode)
filename = episode.url
else:
filename = episode.local_filename()
filename = episode.local_filename(create=False)
assert filename is not None
db.mark_episode(episode.url, is_played=True)
if gpodder.interface == gpodder.MAEMO and not self.config.maemo_allow_custom_player:

View File

@ -336,11 +336,14 @@ class podcastChannel(object):
f.write('#EXTM3U\n')
for episode in downloaded_episodes:
filename = episode.local_filename()
if os.path.dirname(filename).startswith(os.path.dirname(m3u_filename)):
filename = filename[len(os.path.dirname(m3u_filename)+os.sep):]
f.write('#EXTINF:0,'+self.title+' - '+episode.title+' ('+episode.cute_pubdate()+')\n')
f.write(filename+'\n')
if episode.was_downloaded(and_exists=True):
filename = episode.local_filename(create=False)
assert filename is not None
if os.path.dirname(filename).startswith(os.path.dirname(m3u_filename)):
filename = filename[len(os.path.dirname(m3u_filename)+os.sep):]
f.write('#EXTINF:0,'+self.title+' - '+episode.title+' ('+episode.cute_pubdate()+')\n')
f.write(filename+'\n')
f.close()
def addDownloadedItem(self, item):
@ -351,7 +354,9 @@ class podcastChannel(object):
# Update metadata on file (if possible and wanted)
if gl.config.update_tags and libtagupdate.tagging_supported():
filename = item.local_filename()
filename = item.local_filename(create=False)
assert filename is not None
try:
libtagupdate.update_metadata_on_file(filename, title=item.title, artist=self.title, genre='Podcast')
except Exception, e:
@ -426,7 +431,7 @@ class podcastChannel(object):
new_iter = new_model.append((item.url, item.title, filelength,
True, None, item.cute_pubdate(), description, util.remove_html_tags(item.description),
item.local_filename(), item.extension()))
'XXXXXXXXXXXXXUNUSEDXXXXXXXXXXXXXXXXXXX', item.extension()))
self.iter_set_downloading_columns( new_model, new_iter, episode=item)
urls.append(item.url)
@ -470,7 +475,7 @@ class podcastChannel(object):
# if the foldername has not been set, check if the (old) md5 filename exists
if self.foldername is None and os.path.exists(os.path.join(gl.downloaddir, urldigest)):
log('Found pre-0.14.0 download folder for %s: %s', self.title, urldigest, sender=self)
log('Found pre-0.15.0 download folder for %s: %s', self.title, urldigest, sender=self)
self.foldername = urldigest
# we have a valid, new folder name in "current_try" -> use that!
@ -523,7 +528,11 @@ class podcastChannel(object):
episode = db.load_episode(url, lambda c: podcastItem.create_from_dict(c, self))
if episode is not None:
util.delete_file(episode.local_filename())
filename = episode.local_filename(create=False)
if filename is not None:
util.delete_file(filename)
else:
log('Cannot delete episode: %s (I have no filename!)', episode.title, sender=self)
episode.set_state(db.STATE_DELETED)
self.update_m3u_playlist()
@ -531,6 +540,7 @@ class podcastChannel(object):
class podcastItem(object):
"""holds data for one object in a channel"""
MAX_FILENAME_LENGTH = 200
@staticmethod
def load(url, channel):
@ -675,7 +685,7 @@ class podcastItem(object):
return saxutils.escape(self.title)
def age_in_days(self):
return util.file_age_in_days(self.local_filename())
return util.file_age_in_days(self.local_filename(create=False))
def is_old(self):
return self.age_in_days() > gl.config.episode_old_age
@ -698,29 +708,110 @@ class podcastItem(object):
except:
log('Cannot delete episode from disk: %s', self.title, traceback=True, sender=self)
def local_filename( self):
@classmethod
def find_unique_file_name(cls, url, filename, extension):
current_try = util.sanitize_filename(filename, cls.MAX_FILENAME_LENGTH)+extension
next_try_id = 2
lookup_url = None
while db.episode_filename_exists(current_try):
if next_try_id == 2:
# If we arrive here, current_try has a collision, so
# try to resolve the URL for a better basename
log('Filename collision: %s - trying to resolve...', current_try)
url = util.get_real_url(url)
(episode_filename, extension_UNUSED) = util.filename_from_url(url)
current_try = util.sanitize_filename(episode_filename, cls.MAX_FILENAME_LENGTH)
if not db.episode_filename_exists(current_try):
log('Filename %s is available - collision resolved.', current_try)
return current_try
else:
log('Continuing search with %s as basename...', current_try)
current_try = '%s (%d)%s' % (filename, next_try_id, extension)
next_try_id += 1
return current_try
def local_filename(self, create, force_update=False):
"""Get (and possibly generate) the local saving filename
Pass create=True if you want this function to generate a
new filename if none exists. You only want to do this when
planning to create/download the file after calling this function.
Normally, you should pass create=False. This will only
create a filename when the file already exists from a previous
version of gPodder (where we used md5 filenames). If the file
does not exist (and the filename also does not exist), this
function will return None.
If you pass force_update=True to this function, it will try to
find a new (better) filename and move the current file if this
is the case. This is useful if (during the download) you get
more information about the file, e.g. the mimetype and you want
to include this information in the file name generation process.
The generated filename is stored in the database for future access.
"""
ext = self.extension()
# For compatibility with already-downloaded episodes,
# we accept md5 filenames if they are downloaded now.
md5_filename = os.path.join(self.channel.save_dir, hashlib.md5(self.url).hexdigest()+ext)
if os.path.exists(md5_filename) or not gl.config.experimental_file_naming:
return md5_filename
# For compatibility with already-downloaded episodes, we
# have to know md5 filenames if they are downloaded already
urldigest = hashlib.md5(self.url).hexdigest()
# If the md5 filename does not exist,
( episode, e ) = util.filename_from_url(self.url)
episode = util.sanitize_filename(episode)
# add the first 32 bits of the sha1 hash of the url for uniqueness
episode += '_%s' % hashlib.sha1(self.url).hexdigest()[:8].upper()
episode += ext # finally add the extension
if not create and self.filename is None:
urldigest_filename = os.path.join(self.channel.save_dir, urldigest+ext)
if os.path.exists(urldigest_filename):
# The file exists, so set it up in our database
log('Recovering pre-0.15.0 file: %s', urldigest_filename, sender=self)
self.filename = urldigest+ext
self.auto_filename = 1
self.save()
return urldigest_filename
return None
# If the episode filename looks suspicious,
# we still return the md5 filename to be on
# the safe side of the fence ;)
if len(episode) == 0 or episode.startswith('redirect.'):
return md5_filename
filename = os.path.join(self.channel.save_dir, episode)
return filename
if self.filename is None or force_update or (self.auto_filename and self.filename == urldigest+ext):
# Try to find a new filename for the current file
(episode_filename, extension_UNUSED) = util.filename_from_url(self.url)
fn_template = util.sanitize_filename(episode_filename, self.MAX_FILENAME_LENGTH)
if 'redirect' in fn_template:
# This looks like a redirection URL - force URL resolving!
log('Looks like a redirection to me: %s', self.url, sender=self)
url = util.get_real_url(self.url)
log('Redirection resolved to: %s', url, sender=self)
(episode_filename, extension_UNUSED) = util.filename_from_url(url)
fn_template = util.sanitize_filename(episode_filename, self.MAX_FILENAME_LENGTH)
# If the basename is empty, use the md5 hexdigest of the URL
if len(fn_template) == 0 or fn_template.startswith('redirect.'):
log('Report to bugs.gpodder.org: Podcast at %s with episode URL: %s', self.channel.url, self.url, sender=self)
fn_template = urldigest
# Find a unique filename for this episode
wanted_filename = self.find_unique_file_name(self.url, fn_template, ext)
# We populate the filename field the first time - does the old file still exist?
if self.filename is None and os.path.exists(os.path.join(self.channel.save_dir, urldigest+ext)):
log('Found pre-0.15.0 downloaded file: %s', urldigest, sender=self)
self.filename = urldigest+ext
# The old file exists, but we have decided to want a different filename
if self.filename is not None and wanted_filename != self.filename:
# there might be an old download folder crawling around - move it!
new_file_name = os.path.join(self.channel.save_dir, wanted_filename)
old_file_name = os.path.join(self.channel.save_dir, self.filename)
if os.path.exists(old_file_name) and not os.path.exists(new_file_name):
log('Renaming %s => %s', old_file_name, new_file_name, sender=self)
os.rename(old_file_name, new_file_name)
else:
log('Warning: %s exists or %s does not.', new_file_name, old_file_name, sender=self)
log('Updating filename of %s to "%s".', self.url, wanted_filename, sender=self)
self.filename = wanted_filename
self.save()
return os.path.join(self.channel.save_dir, self.filename)
def extension( self):
( filename, ext ) = util.filename_from_url(self.url)
@ -740,7 +831,11 @@ class podcastItem(object):
db.mark_episode(self.url, is_played=True)
def file_exists(self):
return os.path.exists(self.local_filename())
filename = self.local_filename(create=False)
if filename is None:
return False
else:
return os.path.exists(filename)
def was_downloaded(self, and_exists=False):
if self.state != db.STATE_DOWNLOADED:
@ -797,8 +892,11 @@ class podcastItem(object):
pubdate_prop = property(fget=cute_pubdate)
def calculate_filesize( self):
filename = self.local_filename(create=False)
if filename is None:
log('calculate_filesize called, but filename is None!', sender=self)
try:
self.length = os.path.getsize(self.local_filename())
self.length = os.path.getsize(filename)
except:
log( 'Could not get filesize for %s.', self.url)

View File

@ -504,10 +504,10 @@ class DownloadStatusManager(ObservableService):
return False
def cancel_all( self):
def cancel_all(self, keep_files=False):
for element in self.status_list:
self.status_list[element]['iter'] = None
self.status_list[element]['thread'].cancel()
self.status_list[element]['thread'].cancel(keep_files)
# clear the tree model after cancelling
util.idle_add(self.tree_model.clear)
self.downloads_done_bytes = 0

View File

@ -622,6 +622,13 @@ def format_desktop_command( command, filename):
return '%s "%s"' % ( command, filename )
def get_real_url(url):
"""
Gets the real URL of a file and resolves all redirects.
"""
return urllib.urlopen(url).geturl()
def find_command( command):
"""
Searches the system's PATH for a specific command that is