Logging: Deprecate liblogger, use standard logging module

This commit is contained in:
Thomas Perl 2011-07-15 16:32:06 +02:00
parent a650971d79
commit 8e87300c04
26 changed files with 298 additions and 328 deletions

View File

@ -136,8 +136,9 @@ if __name__ == '__main__':
gpodder.ui_folders.insert(0, os.path.join(ui_folder, 'desktop'))
if options.verbose:
from gpodder.liblogger import enable_verbose
enable_verbose()
import logging
FMT = '%(created)f [%(name)s] %(levelname)s: %(message)s'
logging.basicConfig(format=FMT, level=logging.INFO)
if options.qml:
from gpodder import qmlui

View File

@ -6,25 +6,26 @@
# gets called and what the parameters of each hook are.
import gpodder
import logging
from gpodder.liblogger import log
logger = logging.getLogger(__name__)
class gPodderHooks(object):
def __init__(self):
log('Example extension is initializing.')
logger.info('Example extension is initializing.')
def on_podcast_updated(self, podcast):
log(u'on_podcast_updated(%s)' % podcast.title)
logger.info('on_podcast_updated(%s)', podcast.title)
def on_podcast_save(self, podcast):
log(u'on_podcast_save(%s)' % podcast.title)
logger.info('on_podcast_save(%s)', podcast.title)
def on_episode_downloaded(self, episode):
log(u'on_episode_downloaded(%s)' % episode.title)
logger.info('on_episode_downloaded(%s)', episode.title)
def on_episode_save(self, episode):
log(u'on_episode_save(%s)' % episode.title)
logger.info('on_episode_save(%s)', episode.title)
def on_episodes_context_menu(self, episodes):
log(u'on_episodes_context_menu(%d episodes)' % len(episodes))
logger.info('on_episodes_context_menu(%d episodes)', len(episodes))

View File

@ -26,13 +26,13 @@
import gpodder
from gpodder import util
from gpodder.liblogger import log
import atexit
import os
import time
import threading
import ConfigParser
import logging
_ = gpodder.gettext
@ -114,6 +114,7 @@ gPodderSettings.update(window_props('_main_window', width=700, height=500))
gPodderSettings.update(window_props('_episode_selector', width=600, height=400))
gPodderSettings.update(window_props('_episode_window', width=500, height=400))
logger = logging.getLogger(__name__)
class Config(dict):
Settings = gPodderSettings
@ -152,7 +153,7 @@ class Config(dict):
if callback not in self.__observers:
self.__observers.append(callback)
else:
log('Observer already added: %s', repr(callback), sender=self)
logger.warn('Observer already added: %s', repr(callback))
def remove_observer(self, callback):
"""
@ -161,7 +162,7 @@ class Config(dict):
if callback in self.__observers:
self.__observers.remove(callback)
else:
log('Observer not added :%s', repr(callback), sender=self)
logger.warn('Observer not added: %s', repr(callback))
def schedule_save(self):
if self.__save_thread is None:
@ -201,7 +202,7 @@ class Config(dict):
if filename is None:
filename = self.__filename
log('Flushing settings to disk', sender=self)
logger.info('Flushing settings to disk')
parser = ConfigParser.RawConfigParser()
parser.add_section(self.__section)
@ -213,8 +214,8 @@ class Config(dict):
try:
parser.write(open(filename, 'w'))
except:
log('Cannot write settings to %s', filename, sender=self)
raise IOError('Cannot write to file: %s' % filename)
logger.error('Cannot write settings to %s', filename)
raise
self.__save_thread = None
@ -228,8 +229,8 @@ class Config(dict):
try:
parser.read(self.__filename)
except:
log('Cannot parse config file: %s', self.__filename,
sender=self, traceback=True)
logger.warn('Cannot parse config file: %s',
self.__filename, exc_info=True)
for key, default in self.Settings.items():
fieldtype = type(default)
@ -246,8 +247,8 @@ class Config(dict):
else:
value = fieldtype(parser.get(self.__section, key))
except:
log('Invalid value in %s for %s: %s', self.__filename,
key, value, sender=self, traceback=True)
logger.warn('Invalid value in %s for %s: %s',
self.__filename, key, value, exc_info=True)
value = default
self[key] = value
@ -259,9 +260,9 @@ class Config(dict):
if fieldtype == bool:
setattr(self, name, not getattr(self, name))
else:
log('Cannot toggle value: %s (not boolean)', name, sender=self)
logger.warn('Cannot toggle value: %s (not boolean)', name)
else:
log('Invalid setting name: %s', name, sender=self)
logger.warn('Invalid setting name: %s', name)
def update_field(self, name, new_value):
if name in self.Settings:
@ -270,12 +271,13 @@ class Config(dict):
try:
new_value = fieldtype(new_value)
except:
log('Cannot convert "%s" to %s. Ignoring.', str(new_value), fieldtype.__name__, sender=self)
logger.warn('Cannot convert %s to %s.', str(new_value),
fieldtype.__name__, exc_info=True)
return False
setattr(self, name, new_value)
return True
else:
log('Invalid setting name: %s', name, sender=self)
logger.info('Ignoring invalid setting: %s', name)
return False
def __setattr__(self, name, value):
@ -285,16 +287,15 @@ class Config(dict):
try:
if self[name] != fieldtype(value):
old_value = self[name]
log('Update %s: %s => %s', name, old_value, value, sender=self)
logger.info('Update %s: %s => %s', name, old_value, value)
self[name] = fieldtype(value)
for observer in self.__observers:
try:
# Notify observer about config change
observer(name, old_value, self[name])
except:
log('Error while calling observer: %s',
repr(observer), sender=self,
traceback=True)
logger.error('Error while calling observer: %s',
repr(observer), exc_info=True)
self.schedule_save()
except:
raise ValueError('%s has to be of type %s' % (name, fieldtype.__name__))

View File

@ -48,7 +48,8 @@ if not have_sqlite:
print >>sys.stderr, 'Please install pysqlite2 or Python 2.5.'
sys.exit(1)
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
from gpodder import schema
@ -107,20 +108,12 @@ class Database(object):
with self.lock:
cur = self.cursor()
log('Optimizing database for faster startup.', sender=self)
cur.execute("VACUUM")
cur.close()
self._db.close()
self._db = None
def log(self, message, *args, **kwargs):
try:
message = message % args
log('%s', message, sender=self)
except TypeError, e:
log('Exception in log(): %s: %s', e, message, sender=self)
def purge(self, max_episodes, podcast_id):
"""
Deletes old episodes. Should be called
@ -132,7 +125,7 @@ class Database(object):
with self.lock:
cur = self.cursor()
self.log("purge(%s)", podcast_id)
logger.debug('Purge requested for podcast %d', podcast_id)
sql = """
DELETE FROM %s
WHERE podcast_id = ?
@ -160,7 +153,7 @@ class Database(object):
b = b.translate(self.UNICODE_TRANSLATE)
return cmp(a, b)
except:
log('Error while comparing "%s" and "%s"', a, b, sender=self, traceback=True)
logger.warn('Error comparing %s <=> %s', a, b, exc_info=True)
a = re.sub('^the ', '', a.lower())
b = re.sub('^the ', '', b.lower())
return cmp(a, b)
@ -175,7 +168,7 @@ class Database(object):
# Check schema version, upgrade if necessary
schema.upgrade(self._db)
self.log('Connected')
logger.debug('Database opened.')
return self._db
def cursor(self):
@ -184,10 +177,10 @@ class Database(object):
def commit(self):
self.lock.acquire()
try:
self.log("COMMIT")
logger.debug('Commit.')
self.db.commit()
except Exception, e:
log('Error commiting changes: %s', e, sender=self, traceback=True)
logger.error('Cannot commit: %s', e, exc_info=True)
self.lock.release()
def get_content_types(self, id):
@ -258,7 +251,7 @@ class Database(object):
as the only argument.
"""
self.log("load_podcasts()")
logger.debug('load_podcasts')
with self.lock:
cur = self.cursor()
@ -287,7 +280,7 @@ class Database(object):
with self.lock:
cur = self.cursor()
self.log("delete_podcast(%d), %s", podcast.id, podcast.url)
logger.debug('delete_podcast: %d (%s)', podcast.id, podcast.url)
cur.execute("DELETE FROM %s WHERE id = ?" % self.TABLE_PODCAST, (podcast.id, ))
cur.execute("DELETE FROM %s WHERE podcast_id = ?" % self.TABLE_EPISODE, (podcast.id, ))
@ -297,7 +290,7 @@ class Database(object):
self.db.commit()
def load_all_episodes(self, podcast_mapping, limit=10000):
self.log('Loading all episodes from the database')
logger.info('Loading all episodes from the database')
sql = 'SELECT * FROM %s ORDER BY published DESC LIMIT ?' % (self.TABLE_EPISODE,)
args = (limit,)
with self.lock:
@ -312,7 +305,7 @@ class Database(object):
def load_episodes(self, podcast, factory=lambda x: x, limit=1000, state=None):
assert podcast.id
self.log('Loading episodes for podcast %d', podcast.id)
logger.info('Loading episodes for podcast %d', podcast.id)
if state is None:
sql = 'SELECT * FROM %s WHERE podcast_id = ? ORDER BY published DESC LIMIT ?' % (self.TABLE_EPISODE,)
@ -377,7 +370,7 @@ class Database(object):
try:
d = dict(zip((desc[0] for desc in cur.description), cur.fetchone()))
cur.close()
self.log('Loaded episode %d from DB', id)
logger.info('Loaded episode %d', id)
return d
except:
cur.close()
@ -410,7 +403,7 @@ class Database(object):
sql = 'UPDATE %s SET %s WHERE id = ?' % (table, qmarks)
cur.execute(sql, values)
except Exception, e:
log('Cannot save %s to %s: %s', o, table, e, sender=self, traceback=True)
logger.error('Cannot save %s: %s', o, e, exc_info=True)
cur.close()
self.lock.release()
@ -430,8 +423,6 @@ class Database(object):
with self.lock:
cur = self.cursor()
self.log("get(): %s", sql)
if params is None:
cur.execute(sql)
else:

View File

@ -27,7 +27,9 @@
from __future__ import with_statement
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
from gpodder import util
from gpodder import youtube
import gpodder
@ -75,8 +77,7 @@ def get_header_param(headers, param, header_name):
value.append(unicode(part))
return u''.join(value)
except Exception, e:
log('Error trying to get %s from %s: %s', \
param, header_name, str(e), traceback=True)
logger.error('Cannot get %s from %s', param, header_name, exc_info=True)
return None
@ -261,7 +262,7 @@ class DownloadURLOpener(urllib.FancyURLopener):
if current_size > 0:
self.addheader('Range', 'bytes=%s-' % (current_size))
except:
log('Cannot open file for resuming: %s', filename, sender=self, traceback=True)
logger.warn('Cannot resume download: %s', filename, exc_info=True)
tfp = None
current_size = 0
@ -288,7 +289,7 @@ class DownloadURLOpener(urllib.FancyURLopener):
tfp.close()
tfp = open(filename, 'wb')
current_size = 0
log('Cannot resume. Missing or wrong Content-Range header (RFC2616)', sender=self)
logger.warn('Cannot resume: Invalid Content-Range (RFC2616).')
result = headers, fp.geturl()
bs = 1024*8
@ -332,7 +333,8 @@ class DownloadURLOpener(urllib.FancyURLopener):
raise AuthenticationError(_('Wrong username/password'))
if self.channel.auth_username or self.channel.auth_password:
log( 'Authenticating as "%s" to "%s" for realm "%s".', self.channel.auth_username, host, realm, sender = self)
logger.debug('Authenticating as "%s" to "%s" for realm "%s".',
self.channel.auth_username, host, realm)
return ( self.channel.auth_username, self.channel.auth_password )
return (None, None)
@ -352,22 +354,21 @@ class DownloadQueueWorker(threading.Thread):
self.minimum_tasks = minimum_tasks
def run(self):
log('Running new thread: %s', self.getName(), sender=self)
logger.info('Starting new thread: %s', self.getName())
while True:
# Check if this thread is allowed to continue accepting tasks
# (But only after reducing minimum_tasks to zero - see above)
if self.minimum_tasks > 0:
self.minimum_tasks -= 1
elif not self.continue_check_callback(self):
log('%s must not accept new tasks.', self.getName(), sender=self)
return
try:
task = self.queue.pop()
log('%s is processing: %s', self.getName(), task, sender=self)
logger.info('%s is processing: %s', self.getName(), task)
task.run()
except IndexError, e:
log('No more tasks for %s to carry out.', self.getName(), sender=self)
logger.info('No more tasks for %s to carry out.', self.getName())
break
self.exit_callback(self)
@ -408,7 +409,7 @@ class DownloadQueueManager(object):
len(self.worker_threads) < self._config.max_downloads or \
not self._config.max_downloads_enabled:
# We have to create a new thread here, there's work to do
log('I am going to spawn a new worker thread.', sender=self)
logger.info('Starting new worker thread.')
# The new worker should process at least one task (the one
# that we want to forcefully start) if force_start is True.
@ -609,7 +610,7 @@ class DownloadTask(object):
if self.total_size > 0:
self.progress = max(0.0, min(1.0, float(already_downloaded)/self.total_size))
except OSError, os_error:
log('Error while getting size for existing file: %s', os_error, sender=self)
logger.error('Cannot get size for %s', os_error)
else:
# "touch self.tempname", so we also get partial
# files for resuming when the file is queued
@ -728,7 +729,7 @@ class DownloadTask(object):
old_mimetype = self.__episode.mime_type
_basename, ext = os.path.splitext(self.filename)
if new_mimetype != old_mimetype or util.wrong_extension(ext):
log('Correcting mime type: %s => %s', old_mimetype, new_mimetype, sender=self)
logger.info('Updating mime type: %s => %s', old_mimetype, new_mimetype)
old_extension = self.__episode.extension()
self.__episode.mime_type = new_mimetype
new_extension = self.__episode.extension()
@ -753,8 +754,8 @@ class DownloadTask(object):
force_update=True, template=disposition_filename)
new_mimetype, encoding = mimetypes.guess_type(self.filename)
if new_mimetype is not None:
log('Using content-disposition mimetype: %s',
new_mimetype, sender=self)
logger.info('Using content-disposition mimetype: %s',
new_mimetype)
self.__episode.set_mimetype(new_mimetype, commit=True)
shutil.move(self.tempname, self.filename)
@ -762,7 +763,7 @@ class DownloadTask(object):
# Model- and database-related updates after a download has finished
self.__episode.on_downloaded(self.filename)
except DownloadCancelledException:
log('Download has been cancelled/paused: %s', self, sender=self)
logger.info('Download has been cancelled/paused: %s', self)
if self.status == DownloadTask.CANCELLED:
util.delete_file(self.tempname)
self.progress = 0.0
@ -771,18 +772,21 @@ class DownloadTask(object):
self.status = DownloadTask.FAILED
self.error_message = _('Missing content from server')
except IOError, ioe:
log( 'Error "%s" while downloading "%s": %s', ioe.strerror, self.__episode.title, ioe.filename, sender=self, traceback=True)
logger.error('%s while downloading "%s": %s', ioe.strerror,
self.__episode.title, ioe.filename, exc_info=True)
self.status = DownloadTask.FAILED
d = {'error': ioe.strerror, 'filename': ioe.filename}
self.error_message = _('I/O Error: %(error)s: %(filename)s') % d
except gPodderDownloadHTTPError, gdhe:
log( 'HTTP error %s while downloading "%s": %s', gdhe.error_code, self.__episode.title, gdhe.error_message, sender=self)
logger.error('HTTP %s while downloading "%s": %s',
gdhe.error_code, self.__episode.title, gdhe.error_message,
exc_info=True)
self.status = DownloadTask.FAILED
d = {'code': gdhe.error_code, 'message': gdhe.error_message}
self.error_message = _('HTTP Error %(code)s: %(message)s') % d
except Exception, e:
self.status = DownloadTask.FAILED
log('Download error: %s', str(e), traceback=True, sender=self)
logger.error('Download failed: %s', str(e), exc_info=True)
self.error_message = _('Error: %s') % (str(e),)
if self.status == DownloadTask.DOWNLOADING:
@ -790,7 +794,7 @@ class DownloadTask(object):
self.status = DownloadTask.DONE
if self.total_size <= 0:
self.total_size = util.calculate_size(self.filename)
log('Total size updated to %d', self.total_size, sender=self)
logger.info('Total size updated to %d', self.total_size)
self.progress = 1.0
if gpodder.user_hooks is not None:
gpodder.user_hooks.on_episode_downloaded(self.__episode)

View File

@ -17,10 +17,11 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from gpodder.liblogger import log
from mygpoclient import feeds
import logging
logger = logging.getLogger(__name__)
def parse_entry(podcast, entry):
download_url = entry['default_file']['url']
@ -46,18 +47,18 @@ def update_using_feedservice(podcasts):
for podcast in podcasts:
feed = result.get_feed(podcast.url)
if feed is None:
log('Feed not updated: %s', podcast.url)
logger.info('Feed not updated: %s', podcast.url)
continue
# Handle permanent redirects
if feed.get('new_location', False):
new_url = feed['new_location']
log('Redirect %s => %s', podcast.url, new_url)
logger.info('Redirect %s => %s', podcast.url, new_url)
podcast.url = new_url
# Error handling
if feed.get('errors', False):
log('Error parsing feed: %s', repr(feed['errors']))
logger.error('Error parsing feed: %s', repr(feed['errors']))
continue
# Update per-podcast metadata

View File

@ -191,6 +191,4 @@ class UIConfig(config.Config):
window.show()
if getattr(self, maximized, False):
window.maximize()
else:
raise ValueError('Cannot connect %s', config_prefix, sender=self)

View File

@ -27,7 +27,6 @@ _ = gpodder.gettext
N_ = gpodder.ngettext
from gpodder import util
from gpodder.liblogger import log
from gpodder.gtkui.interface.common import BuilderWidget
@ -193,19 +192,16 @@ class gPodderEpisodeSelector(BuilderWidget):
try:
tooltip = getattr(episode, self.tooltip_attribute)
except:
log('Episode object %s does not have tooltip attribute: "%s"', episode, self.tooltip_attribute, sender=self)
tooltip = None
row = [ index, tooltip, self.selected[index] ]
for name, sort_name, sort_type, caption in self.columns:
if not hasattr(episode, name):
log('Warning: Missing attribute "%s"', name, sender=self)
row.append(None)
else:
row.append(getattr( episode, name))
if sort_name is not None:
if not hasattr(episode, sort_name):
log('Warning: Missing attribute "%s"', sort_name, sender=self)
row.append(None)
else:
row.append(getattr( episode, sort_name))
@ -221,7 +217,7 @@ class gPodderEpisodeSelector(BuilderWidget):
self.treeviewEpisodes.set_property('has-tooltip', True)
self.treeviewEpisodes.connect('query-tooltip', self.treeview_episodes_query_tooltip)
except:
log('I cannot set has-tooltip/query-tooltip (need at least PyGTK 2.12)', sender=self)
pass
self.last_tooltip_episode = None
self.episode_list_can_tooltip = True
@ -312,7 +308,7 @@ class gPodderEpisodeSelector(BuilderWidget):
total_size += int(getattr( episode, self.size_attribute))
count += 1
except:
log( 'Cannot get size for %s', episode.title, sender = self)
pass
text = []
if count == 0:

View File

@ -29,7 +29,8 @@ import gpodder
_ = gpodder.gettext
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
from gpodder import util
@ -62,7 +63,7 @@ class gPodderShownotes(gPodderShownotesBase):
setattr(self, 'have_webkit', True)
setattr(self, 'htmlview', webkit.WebView())
else:
log('Your WebKit is too old (see bug 1001).', sender=self)
logger.warn('Your WebKit is too old (gPodder bug 1001).')
setattr(self, 'have_webkit', False)
def navigation_policy_decision(wv, fr, req, action, decision):

View File

@ -35,7 +35,6 @@ import gtk
import gtk.gdk
import gpodder
from gpodder.liblogger import log
_ = gpodder.gettext
@ -120,12 +119,10 @@ class UserAppsReader(object):
return
self.__has_read = True
log('start reader', bench_start=True)
for dir in userappsdirs:
if os.path.exists( dir):
for file in glob.glob(os.path.join(dir, '*.desktop')):
self.parse_and_append( file)
log('end reader', bench_end=True)
self.__finished.set()
def parse_and_append( self, filename):
@ -139,7 +136,6 @@ class UserAppsReader(object):
app_mime = parser.get(sect, 'MimeType')
for needed_type in self.mimetypes:
if app_mime.find(needed_type+'/') != -1:
log('Player found: %s', filename, sender=self)
app_name = parser.get(sect, 'Name')
app_cmd = parser.get(sect, 'Exec')
app_icon = parser.get(sect, 'Icon')

View File

@ -27,7 +27,6 @@ _ = gpodder.gettext
N_ = gpodder.ngettext
from gpodder import util
from gpodder.liblogger import log
from gpodder.gtkui.interface.common import BuilderWidget
from gpodder.gtkui.interface.common import Orientation
@ -226,7 +225,6 @@ class gPodderEpisodeSelector(BuilderWidget):
try:
tooltip = getattr(episode, self.tooltip_attribute)
except:
log('Episode object %s does not have tooltip attribute: "%s"', episode, self.tooltip_attribute, sender=self)
tooltip = None
row = [ index, tooltip, self.selected[index] ]
for name, sort_name, sort_type, caption in self.columns:
@ -238,14 +236,12 @@ class gPodderEpisodeSelector(BuilderWidget):
args = markup_re.match(markup).groups()
row.append(self._markup_template % args)
elif not hasattr(episode, name):
log('Warning: Missing attribute "%s"', name, sender=self)
row.append(None)
else:
row.append(getattr( episode, name))
if sort_name is not None:
if not hasattr(episode, sort_name):
log('Warning: Missing attribute "%s"', sort_name, sender=self)
row.append(None)
else:
row.append(getattr( episode, sort_name))
@ -326,7 +322,7 @@ class gPodderEpisodeSelector(BuilderWidget):
total_size += int(getattr( episode, self.size_attribute))
count += 1
except:
log( 'Cannot get size for %s', episode.title, sender = self)
pass
text = []
if count == 0:

View File

@ -38,7 +38,9 @@ import urllib
import time
import gpodder
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
class gPodderPlayer(dbus.service.Object):
# Empty class with method definitions to send D-Bus signals
@ -252,7 +254,7 @@ class MafwPlaybackMonitor(object):
# player cycles back to the beginning; use wall time in
# this case, too.
position = self._start_position + (time.time()-self._start_time)
log('fixed end position 0 using wall-time: %d (duration: %d)', \
logger.warn('Wall-time positioning: %d (duration: %d)',
position, self._duration)
if self._start_position != position:
self._player.PlaybackStopped(self._start_position, \

View File

@ -303,7 +303,6 @@ class EpisodeListModel(gtk.GenericTreeModel):
(gpodder.STATE_DOWNLOADED, gpodder.STATE_NORMAL))) or \
self._downloading(episode)
else:
log('Should never reach this in has_episodes()!', sender=self)
return True
return any(is_visible(episode) for episode in self._episodes)

View File

@ -30,7 +30,9 @@ _ = gpodder.gettext
from gpodder import util
from gpodder import model
from gpodder import query
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
from gpodder.gtkui import draw
@ -426,7 +428,7 @@ class PodcastChannelProxy(object):
try:
return object.__getattribute__(self, name)
except AttributeError:
log('Unsupported method call (%s)', name, sender=self)
logger.warn('Unsupported method call (%s)', name)
def get_statistics(self):
# Get the total statistics for all channels from the database

View File

@ -27,7 +27,9 @@ import gpodder
_ = gpodder.gettext
from gpodder.services import ObservableService
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
from gpodder import util
from gpodder import youtube
@ -73,7 +75,7 @@ class CoverDownloader(ObservableService):
already-downloaded covers and return None
when we have no cover on the local disk.
"""
log('cover download request for %s', channel.url, sender=self)
logger.debug('cover download request for %s', channel.url)
args = [channel, custom_url, True, avoid_downloading]
threading.Thread(target=self.__get_cover, args=args).start()
@ -144,32 +146,32 @@ class CoverDownloader(ObservableService):
query = ''
split_result = (scheme, netloc, path, query, fragment)
url = urlparse.urlunsplit(split_result)
log('Trying favicon: %s', url, sender=self)
logger.debug('Trying favicon: %s', url)
if url is not None:
image_data = None
try:
log('Trying to download: %s', url, sender=self)
logger.debug('Trying to download: %s', url)
image_data = util.urlopen(url).read()
except:
log('Cannot get image from %s', url, sender=self)
logger.warn('Cannot get image from %s', url, exc_info=True)
if image_data is not None:
log('Saving image data to %s', channel.cover_file, sender=self)
logger.debug('Saving image data to %s', channel.cover_file)
try:
fp = open(channel.cover_file, 'wb')
fp.write(image_data)
fp.close()
except IOError, ioe:
log('Cannot save image due to I/O error', sender=self, traceback=True)
logger.error('Cannot save image due to I/O error', exc_info=True)
pixbuf = None
if os.path.exists(channel.cover_file):
try:
pixbuf = gtk.gdk.pixbuf_new_from_file(channel.cover_file.decode(util.encoding, 'ignore'))
except:
log('Data error while loading %s', channel.cover_file, sender=self)
logger.error('Data error while loading %s', channel.cover_file)
if pixbuf is None:
pixbuf = self.get_default_cover(channel)

View File

@ -74,7 +74,9 @@ from gpodder import download
from gpodder import my
from gpodder import youtube
from gpodder import player
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
_ = gpodder.gettext
N_ = gpodder.ngettext
@ -452,7 +454,6 @@ class gPodder(BuilderWidget, dbus.service.Object):
for e in c.get_all_episodes():
filename = e.local_filename(create=False, check_only=True)
if filename in candidates:
log('Found episode: %s', e.title, sender=self)
found += 1
indicator.on_message(e.title)
indicator.on_progress(float(found)/count)
@ -473,7 +474,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
break
for f in partial_files:
log('Partial file without episode: %s', f, sender=self)
logger.warn('Partial file without episode: %s', f)
util.delete_file(f)
util.idle_add(indicator.on_finished)
@ -595,7 +596,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
# as they can happen with seeking, etc...
return
log('Received play action: %s (%d, %d, %d)', file_uri, start, end, total, sender=self)
logger.debug('Received play action: %s (%d, %d, %d)', file_uri, start, end, total)
episode = self.episode_object_by_uri(file_uri)
if episode is not None:
@ -650,7 +651,6 @@ class gPodder(BuilderWidget, dbus.service.Object):
break
changes.append(my.Change(action, podcast_object))
else:
log('Ignoring action: %s', action, sender=self)
ignored.append(action)
# Confirm all ignored changes
@ -701,8 +701,8 @@ class gPodder(BuilderWidget, dbus.service.Object):
for channel in self.channels:
if channel.url == rewritten_url.old_url:
log('Updating URL of %s to %s', channel, \
rewritten_url.new_url, sender=self)
logger.info('Updating URL of %s to %s', channel,
rewritten_url.new_url)
channel.url = rewritten_url.new_url
channel.save()
self.channel_list_changed = True
@ -1285,7 +1285,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
# below gets the correct list of "seen" tasks
self.download_tasks_seen.remove(task)
except KeyError, key_error:
log('Cannot remove task from "seen" list: %s', task, sender=self)
pass
changed_episode_urls.add(task.url)
# Tell the task that it has been removed (so it can clean up)
task.removed_from_list()
@ -1425,7 +1425,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
else:
if gpodder.ui.desktop:
self.downloads_finished(self.download_tasks_seen)
log('All downloads have finished.', sender=self)
logger.info('All downloads have finished.')
if gpodder.ui.fremantle:
message = '\n'.join(['%s: %s' % (str(task), \
@ -1455,7 +1455,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
return self.download_list_update_enabled
except Exception, e:
log('Exception happened while updating download list.', sender=self, traceback=True)
logger.error('Exception happened while updating download list.', exc_info=True)
self.show_message('%s\n\n%s' % (_('Please report this problem and restart gPodder:'), str(e)), _('Unhandled exception'), important=True)
# We return False here, so the update loop won't be called again,
# that's why we require the restart of gPodder in the message.
@ -1718,7 +1718,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
# below gets the correct list of "seen" tasks
self.download_tasks_seen.remove(task)
except KeyError, key_error:
log('Cannot remove task from "seen" list: %s', task, sender=self)
pass
episode_urls.add(task.url)
# Tell the task that it has been removed (so it can clean up)
task.removed_from_list()
@ -1882,7 +1882,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
shutil.copyfile(filename, destfile)
util.bluetooth_send_file(destfile)
except:
log('Cannot copy "%s" to "%s".', filename, destfile, sender=self)
logger.error('Cannot copy "%s" to "%s".', filename, destfile)
self.notification(_('Error converting file.'), _('Bluetooth file transfer'), important=True)
util.delete_file(destfile)
@ -2131,13 +2131,12 @@ class gPodder(BuilderWidget, dbus.service.Object):
pass
def error_handler(filename, err):
log('Exception in D-Bus call: %s', str(err), \
sender=self)
logger.error('Exception in D-Bus call: %s', str(err))
# Fallback: use the command line client
for command in util.format_desktop_command('panucci', \
[filename]):
log('Executing: %s', repr(command), sender=self)
logger.info('Executing: %s', repr(command))
subprocess.Popen(command)
on_error = lambda err: error_handler(filename, err)
@ -2148,7 +2147,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
continue # This file was handled by the D-Bus call
except Exception, e:
log('Error calling Panucci using D-Bus', sender=self, traceback=True)
logger.error('Calling Panucci using D-Bus', exc_info=True)
elif player == 'MediaBox' and gpodder.ui.fremantle:
try:
MEDIABOX_NAME = 'de.pycage.mediabox'
@ -2161,15 +2160,14 @@ class gPodder(BuilderWidget, dbus.service.Object):
pass
def on_error(err):
log('Exception in D-Bus call: %s', str(err), \
sender=self)
logger.error('Exception in D-Bus call: %s', str(err))
i.load(filename, '%s/x-unknown' % file_type, \
reply_handler=on_reply, error_handler=on_error)
continue # This file was handled by the D-Bus call
except Exception, e:
log('Error calling MediaBox using D-Bus', sender=self, traceback=True)
logger.error('Calling MediaBox using D-Bus', exc_info=True)
groups[player].append(filename)
@ -2208,7 +2206,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
util.gui_open(m3u_filename)
else:
for filename in groups['default']:
log('Opening with system default: %s', filename, sender=self)
logger.debug('Opening with system default: %s', filename)
util.gui_open(filename)
del groups['default']
elif gpodder.ui.fremantle and groups:
@ -2230,7 +2228,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
# For each type now, go and create play commands
for group in groups:
for command in util.format_desktop_command(group, groups[group], resume_position):
log('Executing: %s', repr(command), sender=self)
logger.debug('Executing: %s', repr(command))
subprocess.Popen(command)
# Persist episode status changes to the database
@ -2247,7 +2245,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
try:
self.playback_episodes_for_real(episodes)
except Exception, e:
log('Error in playback!', sender=self, traceback=True)
logger.error('Error in playback!', exc_info=True)
if gpodder.ui.desktop:
self.show_message(_('Please check your media player settings in the preferences dialog.'), \
_('Error opening player'), widget=self.toolPreferences)
@ -2285,7 +2283,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
try:
episode = model.get_value(model.get_iter(path), EpisodeListModel.C_EPISODE)
except TypeError, te:
log('Invalid episode at path %s', str(path), sender=self)
logger.error('Invalid episode at path %s', str(path))
continue
if episode.file_type() not in ('audio', 'video'):
@ -2408,7 +2406,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
selection.select_iter(selected_iter)
self.on_treeChannels_cursor_changed(self.treeChannels)
except:
log('Cannot select podcast in list', traceback=True, sender=self)
logger.error('Cannot select podcast in list', exc_info=True)
self.channel_list_changed = False
def episode_is_downloading(self, episode):
@ -2594,7 +2592,6 @@ class gPodder(BuilderWidget, dbus.service.Object):
for index, url in enumerate(queued):
progress.on_progress(float(index)/float(length))
progress.on_message(url)
log('QUEUE RUNNER: %s', url, sender=self)
try:
# The URL is valid and does not exist already - subscribe!
channel = Model.load_podcast(self.db, url=url, create=True, \
@ -2629,7 +2626,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
error_messages[url] = _('Redirection detected')
continue
except Exception, e:
log('Subscription error: %s', e, traceback=True, sender=self)
logger.error('Subscription error: %s', e, exc_info=True)
error_messages[url] = str(e)
failed.append(url)
continue
@ -2672,17 +2669,17 @@ class gPodder(BuilderWidget, dbus.service.Object):
action.episode_url)
if episode is not None:
log('Play action for %s', episode.url, sender=self)
logger.debug('Play action for %s', episode.url)
episode.mark(is_played=True)
if action.timestamp > episode.current_position_updated and \
action.position is not None:
log('Updating position for %s', episode.url, sender=self)
logger.debug('Updating position for %s', episode.url)
episode.current_position = action.position
episode.current_position_updated = action.timestamp
if action.total:
log('Updating total time for %s', episode.url, sender=self)
logger.debug('Updating total time for %s', episode.url)
episode.total_time = action.total
episode.save()
@ -2693,7 +2690,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
if episode is not None:
if not episode.was_downloaded(and_exists=True):
# Set the episode to a "deleted" state
log('Marking as deleted: %s', episode.url, sender=self)
logger.debug('Marking as deleted: %s', episode.url)
episode.delete_from_disk()
episode.save()
@ -2734,7 +2731,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
try:
return any(w.get_property('is-topmost') for w in hildon.WindowStack.get_default().get_windows())
except Exception, e:
log('Could not determine is-topmost', traceback=True)
logger.warn('Could not determine is-topmost', exc_info=True)
# When in doubt, assume not in foreground
return False
@ -2771,7 +2768,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
n.show()
self._fremantle_notification_visible = True
except Exception, e:
log('Error: %s', str(e), sender=self, traceback=True)
logger.error('Error: %s', str(e), exc_info=True)
self.new_episodes_show(episodes)
self._fremantle_notification_visible = False
elif not self.config.auto_update_feeds:
@ -2834,7 +2831,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
else:
message = _('The feed at %(url)s could not be updated.')
self.notification(message % d, _('Error while updating feed'), widget=self.treeChannels)
log('Error: %s', str(e), sender=self, traceback=True)
logger.error('Error: %s', str(e), exc_info=True)
if self.feed_cache_update_cancelled:
break
@ -3037,10 +3034,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
episodes_status_update = []
for idx, episode in enumerate(episodes):
progress.on_progress(float(idx)/float(len(episodes)))
if episode.archive and skip_locked:
log('Not deleting episode (is locked): %s', episode.title)
else:
log('Deleting episode: %s', episode.title)
if not episode.archive or not skip_locked:
progress.on_message(episode.title)
episode.delete_from_disk()
episode_urls.add(episode.url)
@ -3169,7 +3163,6 @@ class gPodder(BuilderWidget, dbus.service.Object):
def on_itemUpdate_activate(self, widget=None):
# Check if we have outstanding subscribe/unsubscribe actions
if self.on_add_remove_podcasts_mygpo():
log('Update cancelled (received server changes)', sender=self)
return
if self.channels:
@ -3187,7 +3180,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
enable_update = False
for episode in episodes:
log('Downloading episode: %s', episode.title, sender = self)
logger.debug('Downloading episode: %s', episode.title)
if not episode.was_downloaded(and_exists=True):
task_exists = False
for task in self.download_tasks_seen:
@ -3206,7 +3199,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
d = {'episode': episode.title, 'message': str(e)}
message = _('Download error while downloading %(episode)s: %(message)s')
self.show_message(message % d, _('Download error'), important=True)
log('Download error while downloading %s', episode.title, sender=self, traceback=True)
logger.error('While downloading %s', episode.title, exc_info=True)
continue
if add_paused:
@ -3417,7 +3410,6 @@ class gPodder(BuilderWidget, dbus.service.Object):
def remove_podcast_list(self, channels, confirm=True):
if not channels:
log('No podcasts selected for deletion', sender=self)
return
if len(channels) == 1:
@ -3756,7 +3748,6 @@ class gPodder(BuilderWidget, dbus.service.Object):
def show_episode_shownotes(self, episode):
if self.episode_shownotes_window is None:
log('First-time use of episode window --- creating', sender=self)
self.episode_shownotes_window = gPodderShownotes(self.gPodder, _config=self.config, \
_download_episode_list=self.download_episode_list, \
_playback_episodes=self.playback_episodes, \
@ -3771,20 +3762,20 @@ class gPodder(BuilderWidget, dbus.service.Object):
def restart_auto_update_timer(self):
if self._auto_update_timer_source_id is not None:
log('Removing existing auto update timer.', sender=self)
logger.debug('Removing existing auto update timer.')
gobject.source_remove(self._auto_update_timer_source_id)
self._auto_update_timer_source_id = None
if self.config.auto_update_feeds and \
self.config.auto_update_frequency:
interval = 60*1000*self.config.auto_update_frequency
log('Setting up auto update timer with interval %d.', \
self.config.auto_update_frequency, sender=self)
logger.debug('Setting up auto update timer with interval %d.',
self.config.auto_update_frequency)
self._auto_update_timer_source_id = gobject.timeout_add(\
interval, self._on_auto_update_timer)
def _on_auto_update_timer(self):
log('Auto update timer fired.', sender=self)
logger.debug('Auto update timer fired.')
self.update_feed_cache(force_update=True)
# Ask web service for sub changes (if enabled)
@ -3911,7 +3902,7 @@ def main(options=None):
bus_name = dbus.service.BusName(gpodder.dbus_bus_name, bus=gpodder.dbus_session_bus)
except dbus.exceptions.DBusException, dbe:
log('Warning: Cannot get "on the bus".', traceback=True)
logger.warn('Cannot get "on the bus".', exc_info=True)
dlg = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_ERROR, \
gtk.BUTTONS_CLOSE, _('Cannot start gPodder'))
dlg.format_secondary_markup(_('D-Bus error: %s') % (str(dbe),))

View File

@ -36,7 +36,8 @@ import functools
import gpodder
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
def call_hooks(func):
@ -55,8 +56,8 @@ def call_hooks(func):
if callback is not None:
result = callback(*args, **kwargs)
except Exception, e:
log('Error in %s, function %s: %s', filename, method_name, \
e, traceback=True, sender=self)
logger.error('Error in %s, function %s: %s', filename,
method_name, e, exc_info=True)
func(self, *args, **kwargs)
return result
@ -76,9 +77,9 @@ class HookManager(object):
module = self._load_module(filename)
if module is not None:
self.modules.append((filename, module))
log('Module loaded: %s', filename, sender=self)
logger.info('Module loaded: %s', filename)
except Exception, e:
log('Error loading %s: %s', filename, e, sender=self)
logger.error('Cannot load %s: %s', filename, e, exc_info=True)
def has_modules(self):
"""Check whether this manager manages any modules

View File

@ -17,43 +17,19 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# liblogger.py -- gPodder logging facility
# Thomas Perl <thp perli net> 20061117
#
#
# gpodder.liblogger - DEPRECATED logging facility
# Thomas Perl, 2011-07-15
import traceback
import time
# XXX Deprecation warning XXX
# This module is here to support old hooks scripts that have not
# yet been rewritten to utilize the standard 'logging' module.
# Please do not use this DEPRECATED module in new code!
# XXX Deprecation warning XXX
write_to_stdout = False
import logging
logger = logging.getLogger('DEPRECATED:' + __name__)
def enable_verbose():
    """Enable printing of log messages to stdout.

    Sets the module-level ``write_to_stdout`` flag that ``log()``
    checks before printing anything.
    """
    global write_to_stdout
    write_to_stdout = True
first_time = time.time()
last_times = []
def log( message, *args, **kwargs):
    """Print a timestamped log message to stdout (if verbose mode is on).

    Recognized keyword arguments:
        sender     -- object whose class name is prepended to the message
        bench_start -- push the current time onto the benchmark stack
        bench_end   -- pop the benchmark stack and append the elapsed time
        traceback   -- if true, also print the current exception traceback
    """
    global first_time
    global last_times
    if 'sender' in kwargs:
        # Prefix the message with the sender's class name, e.g. "(gPodder) ..."
        message = '(%s) %s' % ( kwargs['sender'].__class__.__name__, message )
    if 'bench_start' in kwargs:
        last_times.append(time.time())
    if 'bench_end' in kwargs and len(last_times) > 0:
        message += (' (benchmark: %.4f seconds)' % (time.time()-(last_times.pop())))
    if write_to_stdout:
        # Timestamp is seconds since module import; %-args applied last
        print (('[%8.3f] ' % (time.time()-first_time)) + message) % args
    if kwargs.get( 'traceback', False):
        # format_exc() returns the string 'None' when no exception is active
        error = traceback.format_exc()
        if error.strip() != 'None':
            print error
def msg( type, message, *args):
s = message % args
print '%c\t%s' % ( type[0].upper(), s )
def log(message, *args, **kwargs):
    """DEPRECATED - do not use in new code!

    Compatibility shim that forwards old-style liblogger calls to the
    standard 'logging' module.  Legacy keyword arguments:
        sender    -- instance whose class name is prepended to the message
        traceback -- if true, attach the current exception traceback
    Other legacy kwargs (bench_start/bench_end) are accepted and ignored.
    """
    sender = kwargs.get('sender')
    if sender is not None:
        message = '(%s) %s' % (sender.__class__.__name__, message)
    # Pass args through so logging formats lazily; eager "message % args"
    # would raise on messages containing a literal '%' with no args.
    # Attach exception info only when the caller asked for a traceback;
    # an unconditional exc_info=True logs "NoneType: None" noise for
    # every call made outside an exception handler.
    logger.info(message, *args, exc_info=bool(kwargs.get('traceback')))

View File

@ -28,7 +28,8 @@ from gpodder import util
from gpodder import feedcore
from gpodder import youtube
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
import os
import re
@ -195,7 +196,7 @@ class PodcastEpisode(PodcastModelObject):
episode.mime_type = e.get('type', 'application/octet-stream')
if episode.mime_type == '':
# See Maemo bug 10036
log('Fixing empty mimetype in ugly feed', sender=episode)
logger.warn('Fixing empty mimetype in ugly feed')
episode.mime_type = 'application/octet-stream'
if '/' not in episode.mime_type:
@ -438,9 +439,9 @@ class PodcastEpisode(PodcastModelObject):
if 'redirect' in fn_template and template is None:
# This looks like a redirection URL - force URL resolving!
log('Looks like a redirection to me: %s', self.url, sender=self)
logger.warn('Looks like a redirection to me: %s', self.url)
url = util.get_real_url(self.channel.authenticate_url(self.url))
log('Redirection resolved to: %s', url, sender=self)
logger.info('Redirection resolved to: %s', url)
episode_filename, _ = util.filename_from_url(url)
fn_template = util.sanitize_filename(episode_filename, self.MAX_FILENAME_LENGTH)
@ -452,7 +453,8 @@ class PodcastEpisode(PodcastModelObject):
# If the basename is empty, use the md5 hexdigest of the URL
if not fn_template or fn_template.startswith('redirect.'):
log('Report to bugs.gpodder.org: Podcast at %s with episode URL: %s', self.channel.url, self.url, sender=self)
logger.error('Report this feed: Podcast %s, episode %s',
self.channel.url, self.url)
fn_template = hashlib.md5(self.url).hexdigest()
# Find a unique filename for this episode
@ -468,20 +470,17 @@ class PodcastEpisode(PodcastModelObject):
new_file_name = os.path.join(self.channel.save_dir, wanted_filename)
old_file_name = os.path.join(self.channel.save_dir, self.download_filename)
if os.path.exists(old_file_name) and not os.path.exists(new_file_name):
log('Renaming %s => %s', old_file_name, new_file_name, sender=self)
logger.info('Renaming %s => %s', old_file_name, new_file_name)
os.rename(old_file_name, new_file_name)
elif force_update and not os.path.exists(old_file_name):
# When we call force_update, the file might not yet exist when we
# call it from the downloading code before saving the file
log('Choosing new filename: %s', new_file_name, sender=self)
logger.info('Choosing new filename: %s', new_file_name)
else:
log('Warning: %s exists or %s does not.', new_file_name, old_file_name, sender=self)
log('Updating filename of %s to "%s".', self.url, wanted_filename, sender=self)
logger.warn('%s exists or %s does not', new_file_name, old_file_name)
logger.info('Updating filename of %s to "%s".', self.url, wanted_filename)
elif self.download_filename is None:
log('Setting filename to "%s".', wanted_filename, sender=self)
else:
log('Should update filename. Stays the same (%s). Good!', \
wanted_filename, sender=self)
logger.info('Setting download filename: %s', wanted_filename)
self.download_filename = wanted_filename
self.save()
@ -559,7 +558,7 @@ class PodcastEpisode(PodcastModelObject):
try:
return datetime.datetime.fromtimestamp(self.published).strftime('%H%M')
except:
log('Cannot format published (time) for "%s".', self.title, sender=self)
logger.warn('Cannot format pubtime: %s', self.title, exc_info=True)
return '0000'
def playlist_title(self):
@ -583,14 +582,15 @@ class PodcastEpisode(PodcastModelObject):
pubdate_prop = property(fget=cute_pubdate)
def calculate_filesize( self):
def calculate_filesize(self):
filename = self.local_filename(create=False)
if filename is None:
log('calculate_filesized called, but filename is None!', sender=self)
return
try:
self.file_size = os.path.getsize(filename)
except:
log( 'Could not get filesize for %s.', self.url)
logger.error('Could not get file size: %s', filename, exc_info=True)
def is_finished(self):
"""Return True if this episode is considered "finished playing"
@ -619,7 +619,7 @@ class PodcastEpisode(PodcastModelObject):
def is_duplicate(self, episode):
if self.title == episode.title and self.published == episode.published:
log('Possible duplicate detected: %s', self.title)
logger.warn('Possible duplicate detected: %s', self.title)
return True
return False
@ -671,7 +671,7 @@ class PodcastChannel(PodcastModelObject):
basename = os.path.basename(filename)
existing = self.get_episode_by_filename(basename)
if existing:
log('Importing external download: %s', filename)
logger.info('Importing external download: %s', filename)
existing.on_downloaded(filename)
count += 1
continue
@ -680,7 +680,7 @@ class PodcastChannel(PodcastModelObject):
wanted_filename = episode.local_filename(create=True, \
return_wanted_filename=True)
if basename == wanted_filename:
log('Importing external download: %s', filename)
logger.info('Importing external download: %s', filename)
episode.download_filename = basename
episode.on_downloaded(filename)
count += 1
@ -699,7 +699,7 @@ class PodcastChannel(PodcastModelObject):
# if the wanted type is the same as the target type,
# assume that it's the correct file
if wanted_type is None or wanted_type == target_type:
log('Importing external download: %s', filename)
logger.info('Importing external download: %s', filename)
episode.download_filename = basename
episode.on_downloaded(filename)
found = True
@ -707,15 +707,15 @@ class PodcastChannel(PodcastModelObject):
break
if not found:
log('Unknown external file: %s', filename)
logger.warn('Unknown external file: %s', filename)
target_dir = os.path.join(self.save_dir, 'Unknown')
if util.make_directory(target_dir):
target_file = os.path.join(target_dir, basename)
log('Moving %s => %s', filename, target_file)
logger.info('Moving %s => %s', filename, target_file)
try:
shutil.move(filename, target_file)
except Exception, e:
log('Could not move file: %s', e, sender=self)
logger.error('Could not move file: %s', e, exc_info=True)
return count
@ -792,7 +792,6 @@ class PodcastChannel(PodcastModelObject):
for attribute in ('href', 'url'):
new_value = getattr(feed.feed.image, attribute, None)
if new_value is not None:
log('Found cover art in %s: %s', attribute, new_value)
self.cover_url = new_value
if hasattr(feed.feed, 'icon'):
@ -814,7 +813,7 @@ class PodcastChannel(PodcastModelObject):
key=lambda x: x.get('updated_parsed', (0,)*9), \
reverse=True)[:max_episodes]
except Exception, e:
log('Could not sort episodes: %s', e, sender=self, traceback=True)
logger.warn('Could not sort episodes: %s', e, exc_info=True)
entries = feed.entries[:max_episodes]
else:
entries = feed.entries
@ -837,20 +836,20 @@ class PodcastChannel(PodcastModelObject):
episode = self.EpisodeClass.from_feedparser_entry(entry, self, mimetype_prefs)
if episode is not None:
if not episode.title:
log('Using filename as title for episode at %s.', \
episode.url, sender=self)
logger.warn('Using filename as title for %s',
episode.url)
basename = os.path.basename(episode.url)
episode.title, ext = os.path.splitext(basename)
# Maemo bug 12073
if not episode.guid:
log('Using download URL as GUID for episode %s.', \
episode.title, sender=self)
logger.warn('Using download URL as GUID for %s',
episode.title)
episode.guid = episode.url
seen_guids.add(episode.guid)
except Exception, e:
log('Cannot instantiate episode: %s. Skipping.', e, sender=self, traceback=True)
logger.error('Skipping episode: %s', e, exc_info=True)
continue
if episode is None:
@ -875,7 +874,7 @@ class PodcastChannel(PodcastModelObject):
# published earlier than one week before the most
# recent existing episode, do not mark it as new.
if episode.published < last_published - self.SECONDS_PER_WEEK:
log('Episode with old date: %s', episode.title, sender=self)
logger.debug('Episode with old date: %s', episode.title)
episode.is_new = False
episode.save()
@ -888,8 +887,8 @@ class PodcastChannel(PodcastModelObject):
e.guid not in seen_guids)
for episode in episodes_to_purge:
log('Episode removed from feed: %s (%s)', episode.title, \
episode.guid, sender=self)
logger.debug('Episode removed from feed: %s (%s)',
episode.title, episode.guid)
self.db.delete_episode_by_guid(episode.guid, self.id)
# This *might* cause episodes to be skipped if there were more than
@ -1024,20 +1023,20 @@ class PodcastChannel(PodcastModelObject):
# rename folder if custom_title looks sane
new_folder_name = self.find_unique_folder_name(custom_title)
if len(new_folder_name) > 0 and new_folder_name != self.download_folder:
log('Changing download_folder based on custom title: %s', custom_title, sender=self)
new_folder = os.path.join(gpodder.downloads, new_folder_name)
old_folder = os.path.join(gpodder.downloads, self.download_folder)
if os.path.exists(old_folder):
if not os.path.exists(new_folder):
# Old folder exists, new folder does not -> simply rename
log('Renaming %s => %s', old_folder, new_folder, sender=self)
logger.info('Renaming %s => %s', old_folder, new_folder)
os.rename(old_folder, new_folder)
else:
# Both folders exist -> move files and delete old folder
log('Moving files from %s to %s', old_folder, new_folder, sender=self)
logger.info('Moving files from %s to %s', old_folder,
new_folder)
for file in glob.glob(os.path.join(old_folder, '*')):
shutil.move(file, new_folder)
log('Removing %s', old_folder, sender=self)
logger.info('Removing %s', old_folder)
shutil.rmtree(old_folder, ignore_errors=True)
self.download_folder = new_folder_name
self.save()
@ -1097,20 +1096,20 @@ class PodcastChannel(PodcastModelObject):
# if this is an empty string, try the basename
if len(fn_template) == 0:
log('That is one ugly feed you have here! (Report this to bugs.gpodder.org: %s)', self.url, sender=self)
logger.warn('That is one ugly feed you have here! (Report this to bugs.gpodder.org: %s)', self.url)
fn_template = util.sanitize_filename(os.path.basename(self.url), self.MAX_FOLDERNAME_LENGTH)
# If the basename is also empty, use the first 6 md5 hexdigest chars of the URL
if len(fn_template) == 0:
log('That is one REALLY ugly feed you have here! (Report this to bugs.gpodder.org: %s)', self.url, sender=self)
logger.warn('That is one REALLY ugly feed you have here! (Report this to bugs.gpodder.org: %s)', self.url)
fn_template = urldigest # no need for sanitize_filename here
# Find a unique folder name for this podcast
wanted_download_folder = self.find_unique_folder_name(fn_template)
# if the download_folder has not been set, check if the (old) md5 filename exists
# TODO: Remove this code for "tres" release (pre-0.15.0 not supported after migration)
if self.download_folder is None and os.path.exists(os.path.join(gpodder.downloads, urldigest)):
log('Found pre-0.15.0 download folder for %s: %s', self.title, urldigest, sender=self)
self.download_folder = urldigest
# we have a valid, new folder name in "current_try" -> use that!
@ -1121,24 +1120,27 @@ class PodcastChannel(PodcastModelObject):
if os.path.exists(old_folder_name):
if not os.path.exists(new_folder_name):
# Old folder exists, new folder does not -> simply rename
log('Renaming %s => %s', old_folder_name, new_folder_name, sender=self)
logger.info('Renaming %s => %s', old_folder_name,
new_folder_name)
os.rename(old_folder_name, new_folder_name)
else:
# Both folders exist -> move files and delete old folder
log('Moving files from %s to %s', old_folder_name, new_folder_name, sender=self)
logger.info('Moving files from %s to %s',
old_folder_name, new_folder_name)
for file in glob.glob(os.path.join(old_folder_name, '*')):
shutil.move(file, new_folder_name)
log('Removing %s', old_folder_name, sender=self)
logger.info('Removing %s', old_folder_name)
shutil.rmtree(old_folder_name, ignore_errors=True)
log('Updating download_folder of %s to "%s".', self.url, wanted_download_folder, sender=self)
logger.info('Updating download_folder of %s to %s', self.url,
wanted_download_folder)
self.download_folder = wanted_download_folder
self.save()
save_dir = os.path.join(gpodder.downloads, self.download_folder)
# Create save_dir if it does not yet exist
if not util.make_directory( save_dir):
log( 'Could not create save_dir: %s', save_dir, sender = self)
if not util.make_directory(save_dir):
logger.error('Could not create save_dir: %s', save_dir)
return save_dir

View File

@ -35,7 +35,8 @@ import sys
import threading
import time
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
from gpodder import util
from gpodder import minidb
@ -272,9 +273,9 @@ class MygPoClient(object):
def set_subscriptions(self, urls):
if self.can_access_webservice():
log('Uploading (overwriting) subscriptions...')
logger.debug('Uploading (overwriting) subscriptions...')
self._client.put_subscriptions(self.device_id, urls)
log('Subscription upload done.')
logger.debug('Subscription upload done.')
else:
raise Exception('Webservice access not enabled')
@ -289,19 +290,19 @@ class MygPoClient(object):
int(time.time()), None, None, None)
def on_delete(self, episodes):
log('Storing %d episode delete actions', len(episodes), sender=self)
logger.debug('Storing %d episode delete actions', len(episodes))
self._store.save(self._convert_episode(e, 'delete') for e in episodes)
def on_download(self, episodes):
log('Storing %d episode download actions', len(episodes), sender=self)
logger.debug('Storing %d episode download actions', len(episodes))
self._store.save(self._convert_episode(e, 'download') for e in episodes)
def on_playback_full(self, episode, start, end, total):
log('Storing full episode playback action', sender=self)
logger.debug('Storing full episode playback action')
self._store.save(self._convert_played_episode(episode, start, end, total))
def on_playback(self, episodes):
log('Storing %d episode playback actions', len(episodes), sender=self)
logger.debug('Storing %d episode playback actions', len(episodes))
self._store.save(self._convert_episode(e, 'play') for e in episodes)
def on_subscribe(self, urls):
@ -332,7 +333,7 @@ class MygPoClient(object):
# Store the current contents of the queue database
self._store.commit()
log('Worker thread waiting for timeout', sender=self)
logger.debug('Worker thread waiting for timeout')
time.sleep(self.FLUSH_TIMEOUT)
# Only work when enabled, UID set and allowed to work
@ -340,12 +341,12 @@ class MygPoClient(object):
(self._worker_thread is not None or forced):
self._worker_thread = None
log('Worker thread starting to work...', sender=self)
logger.debug('Worker thread starting to work...')
for retry in range(self.FLUSH_RETRIES):
must_retry = False
if retry:
log('Retrying flush queue...', sender=self)
logger.debug('Retrying flush queue...')
# Update the device first, so it can be created if new
for action in self._store.load(UpdateDeviceAction):
@ -372,41 +373,41 @@ class MygPoClient(object):
# No more pending actions. Ready to quit.
break
log('Worker thread finished.', sender=self)
logger.debug('Worker thread finished.')
else:
log('Worker thread may not execute (disabled).', sender=self)
logger.info('Worker thread may not execute (disabled).')
# Store the current contents of the queue database
self._store.commit()
def flush(self, now=False):
if not self.can_access_webservice():
log('Flush requested, but sync disabled.', sender=self)
logger.warn('Flush requested, but sync disabled.')
return
if self._worker_thread is None or now:
if now:
log('Flushing NOW.', sender=self)
logger.debug('Flushing NOW.')
else:
log('Flush requested.', sender=self)
logger.debug('Flush requested.')
self._worker_thread = threading.Thread(target=self._worker_proc, args=[now])
self._worker_thread.setDaemon(True)
self._worker_thread.start()
else:
log('Flush requested, already waiting.', sender=self)
logger.debug('Flush requested, already waiting.')
def on_config_changed(self, name=None, old_value=None, new_value=None):
if name in ('mygpo_username', 'mygpo_password', 'mygpo_server') \
or self._client is None:
self._client = api.MygPodderClient(self._config.mygpo_username,
self._config.mygpo_password, self._config.mygpo_server)
log('Reloading settings.', sender=self)
logger.info('Reloading settings.')
elif name.startswith('mygpo_device_'):
# Update or create the device
self.create_device()
def synchronize_episodes(self, actions):
log('Starting episode status sync.', sender=self)
logger.debug('Starting episode status sync.')
def convert_to_api(action):
dt = datetime.datetime.utcfromtimestamp(action.timestamp)
@ -439,14 +440,13 @@ class MygPoClient(object):
changes = self._client.download_episode_actions(since_o.since)
received_actions = [convert_from_api(a) for a in changes.actions]
log('Received %d episode actions', len(received_actions), \
sender=self)
logger.debug('Received %d episode actions', len(received_actions))
self._store.save(received_actions)
# Save the "since" value for later use
self._store.update(since_o, since=changes.since)
except Exception, e:
log('Exception while polling for episodes.', sender=self, traceback=True)
logger.warn('Exception while polling for episodes.', exc_info=True)
# Step 2: Upload Episode actions
@ -458,14 +458,14 @@ class MygPoClient(object):
# Actions have been uploaded to the server - remove them
self._store.remove(actions)
log('Episode actions have been uploaded to the server.', sender=self)
logger.debug('Episode actions have been uploaded to the server.')
return True
except Exception, e:
log('Cannot upload episode actions: %s', str(e), sender=self, traceback=True)
logger.error('Cannot upload episode actions: %s', str(e), exc_info=True)
return False
def synchronize_subscriptions(self, actions):
log('Starting subscription sync.', sender=self)
logger.debug('Starting subscription sync.')
try:
# Load the "since" value from the database
since_o = self._store.get(SinceValue, host=self.host, \
@ -485,12 +485,12 @@ class MygPoClient(object):
# Store received actions for later retrieval (and in case we
# have outdated actions in the database, simply remove them)
for url in result.add:
log('Received add action: %s', url, sender=self)
logger.debug('Received add action: %s', url)
self._store.remove(ReceivedSubscribeAction.remove(url))
self._store.remove(ReceivedSubscribeAction.add(url))
self._store.save(ReceivedSubscribeAction.add(url))
for url in result.remove:
log('Received remove action: %s', url, sender=self)
logger.debug('Received remove action: %s', url)
self._store.remove(ReceivedSubscribeAction.add(url))
self._store.remove(ReceivedSubscribeAction.remove(url))
self._store.save(ReceivedSubscribeAction.remove(url))
@ -502,7 +502,7 @@ class MygPoClient(object):
remove = [a.url for a in actions if a.is_remove]
if add or remove:
log('Uploading: +%d / -%d', len(add), len(remove), sender=self)
logger.debug('Uploading: +%d / -%d', len(add), len(remove))
# Only do a push request if something has changed
result = self._client.update_subscriptions(self.device_id, add, remove)
@ -512,26 +512,27 @@ class MygPoClient(object):
# Store URL rewrites for later retrieval by GUI
for old_url, new_url in result.update_urls:
if new_url:
log('Rewritten URL: %s', new_url, sender=self)
logger.debug('Rewritten URL: %s', new_url)
self._store.save(RewrittenUrl(old_url, new_url))
# Actions have been uploaded to the server - remove them
self._store.remove(actions)
log('All actions have been uploaded to the server.', sender=self)
logger.debug('All actions have been uploaded to the server.')
return True
except Exception, e:
log('Cannot upload subscriptions: %s', str(e), sender=self, traceback=True)
logger.error('Cannot upload subscriptions: %s', str(e), exc_info=True)
return False
def update_device(self, action):
try:
log('Uploading device settings...', sender=self)
logger.debug('Uploading device settings...')
self._client.update_device_settings(action.device_id, \
action.caption, action.device_type)
log('Device settings uploaded.', sender=self)
logger.debug('Device settings uploaded.')
return True
except Exception, e:
log('Cannot update device %s: %s', self.device_id, str(e), sender=self, traceback=True)
logger.error('Cannot update device %s: %s', self.device_id,
str(e), exc_info=True)
return False
def get_devices(self):

View File

@ -34,7 +34,8 @@ objects to valid OPML 1.1 files that can be used to backup
or distribute gPodder's channel subscriptions.
"""
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
from gpodder import util
@ -93,9 +94,9 @@ class Importer(object):
self.items.append( channel)
if not len(self.items):
log( 'OPML import finished, but no items found: %s', url, sender = self)
logger.info('OPML import finished, but no items found: %s', url)
except:
log( 'Cannot import OPML from URL: %s', url, traceback=True, sender = self)
logger.error('Cannot import OPML from URL: %s', url, exc_info=True)
@ -111,7 +112,6 @@ class Exporter(object):
def __init__( self, filename):
if filename is None:
log('OPML Exporter with None filename', sender=self)
self.filename = None
elif filename.endswith( '.opml') or filename.endswith( '.xml'):
self.filename = filename
@ -180,7 +180,7 @@ class Exporter(object):
available = util.get_free_disk_space(os.path.dirname(self.filename))
if available < 2*len(data)+FREE_DISK_SPACE_AFTER and not gpodder.win32:
# FIXME: get_free_disk_space still unimplemented for win32
log('Not enough free disk space to save channel list to %s', self.filename, sender = self)
logger.error('Not enough free disk space to save channel list to %s', self.filename)
return False
fp = open(self.filename+'.tmp', 'w')
fp.write(data)
@ -191,7 +191,8 @@ class Exporter(object):
else:
os.rename(self.filename+'.tmp', self.filename)
except:
log('Could not open file for writing: %s', self.filename, sender=self, traceback=True)
logger.error('Could not open file for writing: %s', self.filename,
exc_info=True)
return False
return True

View File

@ -24,7 +24,8 @@ from PySide.QtDeclarative import QDeclarativeImageProvider
from gpodder import youtube
from gpodder import util
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
import os
import urllib
@ -57,7 +58,7 @@ class LocalCachedImageProvider(QDeclarativeImageProvider):
cover_url = yt_url
data = util.urlopen(cover_url).read()
except Exception, e:
log('Error downloading cover: %s', e, sender=self)
logger.error('Error downloading cover: %s', e, exc_info=True)
data = ''
fp = open(filename, 'wb')
fp.write(data)

View File

@ -24,8 +24,6 @@ import gpodder
_ = gpodder.gettext
from gpodder.liblogger import log
from gpodder import model
from gpodder import util
from gpodder import youtube

View File

@ -25,11 +25,11 @@
#
import gpodder
from gpodder.liblogger import log
_ = gpodder.gettext
from gpodder import util
_ = gpodder.gettext
class ObservableService(object):
def __init__(self, signal_names=[]):
@ -41,25 +41,25 @@ class ObservableService(object):
if signal_name in self.observers:
if not observer in self.observers[signal_name]:
self.observers[signal_name].append(observer)
else:
log('Observer already added to signal "%s".', signal_name, sender=self)
else:
log('Signal "%s" is not available for registration.', signal_name, sender=self)
return True
return False
def unregister(self, signal_name, observer):
if signal_name in self.observers:
if observer in self.observers[signal_name]:
self.observers[signal_name].remove(observer)
else:
log('Observer could not be removed from signal "%s".', signal_name, sender=self)
else:
log('Signal "%s" is not available for un-registration.', signal_name, sender=self)
return True
return False
def notify(self, signal_name, *args):
if signal_name in self.observers:
for observer in self.observers[signal_name]:
util.idle_add(observer, *args)
else:
log('Signal "%s" is not available for notification.', signal_name, sender=self)
return True
return False

View File

@ -30,7 +30,9 @@ are not tied to any specific part of gPodder.
"""
import gpodder
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
import os
import os.path
@@ -69,7 +71,7 @@ import locale
try:
locale.setlocale(locale.LC_ALL, '')
except Exception, e:
log('Warning: Cannot set locale (%s).', e)
logger.warn('Cannot set locale (%s)', e, exc_info=True)
# Native filesystem encoding detection
encoding = sys.getfilesystemencoding()
@@ -78,7 +80,7 @@ if encoding is None:
if 'LANG' in os.environ and '.' in os.environ['LANG']:
lang = os.environ['LANG']
(language, encoding) = lang.rsplit('.', 1)
log('Detected encoding: %s', encoding)
logger.info('Detected encoding: %s', encoding)
elif gpodder.ui.fremantle:
encoding = 'utf-8'
elif gpodder.win32:
@@ -88,7 +90,7 @@ if encoding is None:
encoding = 'mbcs'
else:
encoding = 'iso-8859-15'
log('Assuming encoding: ISO-8859-15 ($LANG not set).')
logger.info('Assuming encoding: ISO-8859-15 ($LANG not set).')
# Used by file_type_by_extension()
@@ -107,7 +109,7 @@ def make_directory( path):
try:
os.makedirs( path)
except:
log( 'Could not create directory: %s', path)
logger.warn('Could not create directory: %s', path)
return False
return True
@@ -285,9 +287,9 @@ def calculate_size( path):
try:
sum += calculate_size(os.path.join(path, item))
except:
log('Cannot get size for %s', path)
logger.warn('Cannot get size for %s', path, exc_info=True)
except:
log('Cannot access: %s', path)
logger.warn('Cannot access %s', path, exc_info=True)
return sum
@@ -311,7 +313,7 @@ def file_modification_datetime(filename):
timestamp = s[stat.ST_MTIME]
return datetime.datetime.fromtimestamp(timestamp)
except:
log('Cannot get modification timestamp for %s', filename)
logger.warn('Cannot get mtime for %s', filename, exc_info=True)
return None
@@ -320,13 +322,16 @@ def file_modification_timestamp(filename):
Returns the modification date of the specified file as a number
or -1 if the modification date cannot be determined.
"""
# TODO: Merge with file_modification_datetime
if filename is None:
return -1
try:
s = os.stat(filename)
return s[stat.ST_MTIME]
except:
log('Cannot get modification timestamp for %s', filename)
logger.warn('Cannot get mtime for %s', filename, exc_info=True)
return -1
@@ -375,7 +380,7 @@ def get_free_disk_space_win32(path):
userFree, userTotal, freeOnDisk = win32file.GetDiskFreeSpaceEx(drive)
return userFree
except ImportError:
log('Warning: Running on Win32 but win32api/win32file not installed.')
logger.warn('Running on Win32 but win32api/win32file not installed.')
# Cannot determine free disk space
return 0
@@ -421,7 +426,7 @@ def format_date(timestamp):
try:
timestamp_date = time.localtime(timestamp)[:3]
except ValueError, ve:
log('Warning: Cannot convert timestamp', traceback=True)
logger.warn('Cannot convert timestamp', exc_info=True)
return None
if timestamp_date == today:
@@ -432,7 +437,7 @@ def format_date(timestamp):
try:
diff = int( (time.time() - timestamp)/seconds_in_a_day )
except:
log('Warning: Cannot convert "%s" to date.', timestamp, traceback=True)
logger.warn('Cannot convert "%s" to date.', timestamp, exc_info=True)
return None
try:
@@ -759,7 +764,7 @@ def object_string_formatter( s, **kwargs):
to_s = getattr( o, attr)
result = result.replace( from_s, to_s)
except:
log( 'Could not replace attribute "%s" in string "%s".', attr, s)
logger.warn('Could not replace attribute "%s" in string "%s".', attr, s)
return result
@@ -912,7 +917,7 @@ def get_real_url(url):
try:
return urlopen(url).geturl()
except:
log('Error getting real url for %s', url, traceback=True)
logger.error('Getting real url for %s', url, exc_info=True)
return url
@@ -1017,7 +1022,7 @@ def bluetooth_send_file(filename):
command_line.append(filename)
return (subprocess.Popen(command_line).wait() == 0)
else:
log('Cannot send file. Please install "bluetooth-sendto" or "gnome-obex-send".')
logger.error('Cannot send file. Please install "bluetooth-sendto" or "gnome-obex-send".')
return False
@@ -1135,14 +1140,14 @@ def get_episode_info_from_url(url):
r = http_request(url)
result = {}
log('Trying to get metainfo for %s', url)
logger.debug('Trying to get metainfo for %s', url)
if 'content-length' in r.msg:
try:
length = int(r.msg['content-length'])
result['length'] = length
except ValueError, e:
log('Error converting content-length header.')
logger.error('Converting content-length header.', exc_info=True)
if 'last-modified' in r.msg:
try:
@@ -1150,7 +1155,7 @@ def get_episode_info_from_url(url):
pubdate = time.mktime(parsed_date)
result['pubdate'] = pubdate
except:
log('Error converting last-modified header.')
logger.error('Converting last-modified header.', exc_info=True)
return result
@@ -1169,10 +1174,10 @@ def gui_open(filename):
try:
import osso
except ImportError, ie:
log('Cannot import osso module on maemo.')
logger.warn('Cannot import osso module on maemo.')
return False
log('Using Nokia Media Player to open %s', filename)
logger.debug('Using Nokia Media Player to open %s', filename)
context = osso.Context('gPodder', gpodder.__version__, False)
filename = filename.encode('utf-8')
@@ -1199,7 +1204,7 @@ def gui_open(filename):
subprocess.Popen(['xdg-open', filename])
return True
except:
log('Cannot open file/folder: "%s"', filename, traceback=True)
logger.error('Cannot open file/folder: "%s"', filename, exc_info=True)
return False
@@ -1256,7 +1261,8 @@ def sanitize_filename(filename, max_length=0, use_ascii=False):
filename = filename.decode(encoding, 'ignore')
if max_length > 0 and len(filename) > max_length:
log('Limiting file/folder name "%s" to %d characters.', filename, max_length)
logger.info('Limiting file/folder name "%s" to %d characters.',
filename, max_length)
filename = filename[:max_length]
return re.sub('[/|?*<>:+\[\]\"\\\]', '_', filename.strip().encode(e, 'ignore'))
@@ -1414,17 +1420,17 @@ def run_external_command(command_line):
"""
def open_process(command_line):
log('Running external command: %s', command_line)
logger.debug('Running external command: %s', command_line)
p = subprocess.Popen(command_line, shell=True)
result = p.wait()
if result == 127:
log('Command not found: %s', command_line)
logger.error('Command not found: %s', command_line)
elif result == 126:
log('Command permission denied: %s', command_line)
logger.error('Command permission denied: %s', command_line)
elif result > 0:
log('Command returned an error (%d): %s', result, command_line)
logger.error('Command returned an error (%d): %s', result, command_line)
else:
log('Command finished successfully: %s', command_line)
logger.debug('Command finished successfully: %s', command_line)
threading.Thread(target=open_process, args=(command_line,)).start()

View File

@@ -24,7 +24,9 @@
import gpodder
from gpodder import util
from gpodder.liblogger import log
import logging
logger = logging.getLogger(__name__)
try:
import simplejson as json
@@ -115,7 +117,7 @@ def get_real_download_url(url, preferred_fmt_id=None):
# If the format is available and preferred (or lower),
# use the given format for our fmt_id
if id in formats_available and seen_preferred:
log('Found available YouTube format: %s (fmt_id=%d)', \
logger.info('Found YouTube format: %s (fmt_id=%d)',
description, id)
fmt_id = id
break
@@ -146,7 +148,7 @@ def get_real_channel_url(url):
if m is not None:
next = 'http://www.youtube.com/rss/user/'+ m.group(1) +'/videos.rss'
log('YouTube link resolved: %s => %s', url, next)
logger.debug('YouTube link resolved: %s => %s', url, next)
return next
r = re.compile('http://(?:[a-z]+\.)?youtube\.com/profile?user=([a-z0-9]+)', re.IGNORECASE)
@@ -154,7 +156,7 @@ def get_real_channel_url(url):
if m is not None:
next = 'http://www.youtube.com/rss/user/'+ m.group(1) +'/videos.rss'
log('YouTube link resolved: %s => %s', url, next)
logger.debug('YouTube link resolved: %s => %s', url, next)
return next
return url
@@ -170,7 +172,7 @@ def get_real_cover(url):
data = util.urlopen(api_url).read()
match = re.search('<media:thumbnail url=[\'"]([^\'"]+)[\'"]/>', data)
if match is not None:
log('YouTube userpic for %s is: %s', url, match.group(1))
logger.debug('YouTube userpic for %s is: %s', url, match.group(1))
return match.group(1)
return None