Database schema cleanup / upgrade

Thomas Perl 2010-12-20 14:35:46 +01:00
parent 3e59385337
commit 292c82ab00
19 changed files with 406 additions and 696 deletions

View File

@ -82,7 +82,7 @@ class Podcast(object):
feed updates are disabled, and the podcast should be
excluded from automatic updates.
"""
return self._podcast.feed_update_enabled
return not self._podcast.pause_subscription
def update(self):
"""Updates this podcast by downloading the feed
@ -98,25 +98,25 @@ class Podcast(object):
Display the current feed update status.
"""
if self._podcast.feed_update_enabled:
return "enabled"
else:
if self._podcast.pause_subscription:
return "disabled"
else:
return "enabled"
def feed_update_status(self):
"""Return the feed update status
Return the current feed update status.
"""
return self._podcast.feed_update_enabled
return not self._podcast.pause_subscription
def disable(self):
"""Toogle the feed update to disable
Change the feed update status to disable only if currently enable.
"""
if self._podcast.feed_update_enabled:
self._podcast.feed_update_enabled = False
if not self._podcast.pause_subscription:
self._podcast.pause_subscription = True
self._podcast.save()
def enable(self):
@ -124,8 +124,8 @@ class Podcast(object):
Change the feed update status to enabled only if it is currently disabled.
"""
if not self._podcast.feed_update_enabled:
self._podcast.feed_update_enabled = True
if self._podcast.pause_subscription:
self._podcast.pause_subscription = False
self._podcast.save()
class Episode(object):
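
For reference, here is a minimal sketch (not part of this commit) of the flag inversion used above: the old feed_update_enabled attribute is replaced by a pause_subscription column with the opposite meaning, so the public API derives one from the other. The class name is hypothetical.

class PodcastFlagSketch(object):
    """Hypothetical stand-in for the wrapped _podcast object above."""

    def __init__(self):
        # New column: True means feed updates are paused
        self.pause_subscription = False

    @property
    def feed_update_enabled(self):
        # Old name, kept as a derived value: enabled == not paused
        return not self.pause_subscription

    def disable(self):
        # Only flip the flag if updates are currently enabled
        if not self.pause_subscription:
            self.pause_subscription = True

    def enable(self):
        if self.pause_subscription:
            self.pause_subscription = False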

View File

@ -54,63 +54,43 @@ import re
class Database(object):
UNICODE_TRANSLATE = {ord(u'ö'): u'o', ord(u'ä'): u'a', ord(u'ü'): u'u'}
# Column names, types, required and default values for the channels table
TABLE_CHANNELS = "channels"
SCHEMA_CHANNELS = (
('id', 'INTEGER PRIMARY KEY', True, '-1'),
('url', 'TEXT', True, "''"), # Feed (RSS/Atom) URL of the podcast
('title', 'TEXT', True, "''"), # Podcast name
('override_title', 'TEXT', True, "''"), # Podcast name if user-defined
('link', 'TEXT', True, "''"), # Website URL for the podcast
('description', 'TEXT', False, None), # Description of podcast contents
('image', 'TEXT', False, None), # URL to cover art for the image
('pubDate', 'INTEGER', True, '0'), # Date and time of last feed publication
('username', 'TEXT', True, "''"), # Username for HTTP authentication (feed update + downloads)
('password', 'TEXT', True, "''"), # Password for HTTP authentication (feed update + downloads)
('last_modified', 'TEXT', False, None), # Last-modified HTTP header from last update
('etag', 'TEXT', False, None), # ETag HTTP header from last update
('channel_is_locked', 'INTEGER', True, '0'), # 1 if deletion is prevented, 0 otherwise
('foldername', 'TEXT', True, "''"), # Folder name (basename) to put downloaded episodes
('auto_foldername', 'INTEGER', True, '1'), # 1 if the foldername was auto-generated, 0 otherwise
('updated_timestamp', 'INTEGER', True, '0'), # Timestamp of the last feed update
('feed_update_enabled', 'INTEGER', True, '1'), # 0 to skip this feed when checking for new episodes
)
INDEX_CHANNELS = (
('foldername', 'UNIQUE INDEX'),
('url', 'UNIQUE INDEX'),
('title', 'INDEX'),
# Column names for the podcast table
TABLE_PODCAST = 'podcast'
COLUMNS_PODCAST = (
'title',
'url',
'link',
'description',
'cover_url',
'published',
'auth_username',
'auth_password',
'http_last_modified',
'http_etag',
'auto_archive_episodes',
'download_folder',
'pause_subscription',
)
# Column names for the episode table
TABLE_EPISODES = 'episodes'
SCHEMA_EPISODES = (
('id', 'INTEGER PRIMARY KEY', True, '-1'),
('channel_id', 'INTEGER', True, '-1'), # Foreign key: ID of the podcast of this episode
('url', 'TEXT', True, "''"), # Download URL of the media file
('title', 'TEXT', True, "''"), # Episode title
('length', 'INTEGER', True, '0'), # File length of the media file in bytes
('mimetype', 'TEXT', True, "''"), # Mime type of the media file
('guid', 'TEXT', True, "''"), # GUID of the episode item
('description', 'TEXT', True, "''"), # Longer text description
('link', 'TEXT', True, "''"), # Website URL for the episode
('pubDate', 'INTEGER', True, '0'), # Date and time of publication
('state', 'INTEGER', True, '0'), # Download state (see gpodder.STATE_* constants)
('played', 'INTEGER', True, '1'), # 1 if it's new or played, 0 otherwise
('locked', 'INTEGER', True, '0'), # 1 if deletion is prevented, 0 otherwise
('filename', 'TEXT', False, None), # Filename for the downloaded file (or NULL)
('auto_filename', 'INTEGER', True, '0'), # 1 if the filename was auto-generated, 0 otherwise
('total_time', 'INTEGER', True, '0'), # Length in seconds
('current_position', 'INTEGER', True, '0'), # Current playback position
('current_position_updated', 'INTEGER', True, '0'), # Set to NOW when updating current_position
)
INDEX_EPISODES = (
('guid', 'INDEX'),
('filename', 'UNIQUE INDEX'),
('channel_id', 'INDEX'),
('pubDate', 'INDEX'),
('state', 'INDEX'),
('played', 'INDEX'),
('locked', 'INDEX'),
TABLE_EPISODE = 'episode'
COLUMNS_EPISODE = (
'podcast_id',
'title',
'description',
'url',
'published',
'guid',
'link',
'file_size',
'mime_type',
'state',
'is_new',
'archive',
'download_filename',
'total_time',
'current_position',
'current_position_updated',
)
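
As an illustration (assumed, not taken from this commit), a column list that contains only names is enough for a generic save: attribute names are assumed to match column names, as in the COLUMNS_PODCAST and COLUMNS_EPISODE tuples above.

def save_object_sketch(connection, obj, table, columns):
    # Simplified version of the generic _save_object() approach in this diff:
    # build INSERT/UPDATE statements from the column names and read the
    # values straight off the object with getattr().
    values = [getattr(obj, name) for name in columns]
    if obj.id is None:
        sql = 'INSERT INTO %s (%s) VALUES (%s)' % (
            table, ', '.join(columns), ', '.join('?' * len(columns)))
        cur = connection.execute(sql, values)
        obj.id = cur.lastrowid
    else:
        sql = 'UPDATE %s SET %s WHERE id = ?' % (
            table, ', '.join('%s = ?' % name for name in columns))
        connection.execute(sql, values + [obj.id])

# usage (hypothetical): save_object_sketch(db, podcast, 'podcast', Database.COLUMNS_PODCAST)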
def __init__(self, filename):
@ -121,11 +101,11 @@ class Database(object):
def close(self):
self.commit()
cur = self.cursor(lock=True)
log('Optimizing database for faster startup.', sender=self)
cur.execute("VACUUM")
cur.close()
self.lock.release()
with self.lock:
cur = self.cursor()
log('Optimizing database for faster startup.', sender=self)
cur.execute("VACUUM")
cur.close()
self._db.close()
self._db = None
@ -137,25 +117,25 @@ class Database(object):
except TypeError, e:
log('Exception in log(): %s: %s', e, message, sender=self)
def purge(self, max_episodes, channel_id):
def purge(self, max_episodes, podcast_id):
"""
Deletes old episodes. Should be called
before adding new episodes to a channel.
before adding new episodes to a podcast.
"""
cur = self.cursor(lock=True)
with self.lock:
cur = self.cursor()
self.log("purge(%s)", channel_id)
sql = """
DELETE FROM episodes
WHERE channel_id = ?
AND state <> ?
AND id NOT IN
(SELECT id FROM episodes WHERE channel_id = ?
ORDER BY pubDate DESC LIMIT ?)"""
cur.execute(sql, (channel_id, gpodder.STATE_DOWNLOADED, channel_id, max_episodes))
self.log("purge(%s)", podcast_id)
sql = """
DELETE FROM %s
WHERE podcast_id = ?
AND state <> ?
AND id NOT IN
(SELECT id FROM %s WHERE podcast_id = ?
ORDER BY published DESC LIMIT ?)""" % (self.TABLE_EPISODE, self.TABLE_EPISODE)
cur.execute(sql, (podcast_id, gpodder.STATE_DOWNLOADED, podcast_id, max_episodes))
cur.close()
self.lock.release()
cur.close()
def db_sort_cmp(self, a, b):
"""
@ -185,12 +165,9 @@ class Database(object):
self._db.text_factory = str
self._db.create_collation("UNICODE", self.db_sort_cmp)
self.log('Connected')
self.__check_schema()
return self._db
def cursor(self, lock=False):
if lock:
self.lock.acquire()
def cursor(self):
return self.db.cursor()
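
The old cursor(lock=True) pattern with explicit release() calls is replaced throughout by "with self.lock:" blocks. A small sketch of that pattern, assuming self.lock is a threading.RLock() as elsewhere in this module:

import threading

class LockedQuerySketch(object):
    # Illustrates the locking style used above: the lock is taken as a
    # context manager, so it is released even if execute() raises.
    def __init__(self, connection):
        self.lock = threading.RLock()
        self._db = connection

    def cursor(self):
        return self._db.cursor()

    def episode_count(self, table='episode'):
        with self.lock:
            cur = self.cursor()
            cur.execute('SELECT COUNT(*) FROM %s' % table)
            (count,) = cur.fetchone()
            cur.close()
        return count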
def commit(self):
@ -202,246 +179,168 @@ class Database(object):
log('Error committing changes: %s', e, sender=self, traceback=True)
self.lock.release()
def _remove_deleted_channels(self):
"""Remove deleted podcasts and episodes (upgrade from gPodder <= 2.5)
If the database was created with gPodder <= 2.5, podcasts may
have been marked as deleted while their metadata and episodes
were kept.
We no longer support this kind of "information keeping", so
simply remove all podcasts marked as "deleted" and their
corresponding episodes to slim down the database.
"""
cur = self.cursor(lock=True)
cur.execute("PRAGMA table_info(%s)" % self.TABLE_CHANNELS)
available = cur.fetchall()
if available:
ID, NAME, TYPE, NOTNULL, DEFAULT = range(5)
existing = set(column[NAME] for column in available)
if 'deleted' in existing:
cur.execute('SELECT id FROM %s WHERE deleted = ?' % self.TABLE_CHANNELS, (1,))
channel_ids = [id for (id,) in cur]
# Remove all deleted channels from the database
for id in channel_ids:
self.log('Removing deleted channel with ID %d', id)
cur.execute('DELETE FROM %s WHERE id = ?' % self.TABLE_CHANNELS, (id,))
cur.execute('DELETE FROM %s WHERE channel_id = ?' % self.TABLE_EPISODES, (id,))
self.lock.release()
def _remove_orphaned_episodes(self):
"""Remove episodes without a corresponding podcast
In some rare circumstances, episodes can be left in the
database without a corresponding podcast. This is an
inconsistency; in that case we simply delete the episode
information, since its podcast cannot be found.
"""
cur = self.cursor(lock=True)
sql = 'DELETE FROM %s WHERE channel_id NOT IN ' + \
'(SELECT DISTINCT id FROM %s)'
cur.execute(sql % (self.TABLE_EPISODES, self.TABLE_CHANNELS,))
self.lock.release()
def __check_schema(self):
"""
Creates all necessary tables and indexes that don't exist.
"""
self.log('Setting up tables and views')
cur = self.cursor(lock=True)
# If a "deleted" column exists in the channel table, remove all
# corresponding channels and their episodes and remove it
self._remove_deleted_channels()
# Create tables and possibly add newly-added columns
self.upgrade_table(self.TABLE_CHANNELS, self.SCHEMA_CHANNELS, self.INDEX_CHANNELS)
self.upgrade_table(self.TABLE_EPISODES, self.SCHEMA_EPISODES, self.INDEX_EPISODES)
# Remove orphaned episodes (episodes without a corresponding
# channel object) from the database to keep the DB clean
self._remove_orphaned_episodes()
# Make sure deleted episodes are played, to simplify querying statistics.
try:
cur.execute("UPDATE episodes SET played = 1 WHERE state = ?", (gpodder.STATE_DELETED,))
except OperationalError:
pass
cur.close()
self.lock.release()
def get_channel_count(self, id):
"""Given a channel ID, returns the statistics for it
def get_podcast_statistics(self, id):
"""Given a podcast ID, returns the statistics for it
Returns a tuple (total, deleted, new, downloaded, unplayed)
"""
total, deleted, new, downloaded, unplayed = 0, 0, 0, 0, 0
cur = self.cursor(lock=True)
cur.execute('SELECT COUNT(*), state, played FROM episodes WHERE channel_id = ? GROUP BY state, played', (id,))
for count, state, played in cur:
total += count
if state == gpodder.STATE_DELETED:
deleted += count
elif state == gpodder.STATE_NORMAL and not played:
new += count
elif state == gpodder.STATE_DOWNLOADED and not played:
downloaded += count
unplayed += count
elif state == gpodder.STATE_DOWNLOADED:
downloaded += count
with self.lock:
cur = self.cursor()
cur.execute('SELECT COUNT(*), state, is_new FROM %s WHERE podcast_id = ? GROUP BY state, is_new' % self.TABLE_EPISODE, (id,))
for count, state, is_new in cur:
total += count
if state == gpodder.STATE_DELETED:
deleted += count
elif state == gpodder.STATE_NORMAL and is_new:
new += count
elif state == gpodder.STATE_DOWNLOADED and is_new:
downloaded += count
unplayed += count
elif state == gpodder.STATE_DOWNLOADED:
downloaded += count
cur.close()
self.lock.release()
cur.close()
return (total, deleted, new, downloaded, unplayed)
def get_total_count(self):
"""Get statistics for episodes in all channels
"""Get statistics for episodes in all podcasts
Returns a tuple (total, deleted, new, downloaded, unplayed)
"""
total, deleted, new, downloaded, unplayed = 0, 0, 0, 0, 0
cur = self.cursor(lock=True)
cur.execute('SELECT COUNT(*), state, played FROM episodes GROUP BY state, played')
for count, state, played in cur:
total += count
if state == gpodder.STATE_DELETED:
deleted += count
elif state == gpodder.STATE_NORMAL and not played:
new += count
elif state == gpodder.STATE_DOWNLOADED and not played:
downloaded += count
unplayed += count
elif state == gpodder.STATE_DOWNLOADED:
downloaded += count
with self.lock:
cur = self.cursor()
cur.execute('SELECT COUNT(*), state, is_new FROM %s GROUP BY state, is_new' % self.TABLE_EPISODE)
for count, state, is_new in cur:
total += count
if state == gpodder.STATE_DELETED:
deleted += count
elif state == gpodder.STATE_NORMAL and is_new:
new += count
elif state == gpodder.STATE_DOWNLOADED and is_new:
downloaded += count
unplayed += count
elif state == gpodder.STATE_DOWNLOADED:
downloaded += count
cur.close()
self.lock.release()
cur.close()
return (total, deleted, new, downloaded, unplayed)
def load_channels(self, factory=None, url=None):
def load_podcasts(self, factory=None, url=None):
"""
Returns channel descriptions as a list of dictionaries or objects,
Returns podcast descriptions as a list of dictionaries or objects,
returned by the factory() function, which receives the dictionary
as the only argument.
"""
self.log("load_channels()")
self.log("load_podcasts()")
cur = self.cursor(lock=True)
cur.execute('SELECT * FROM %s ORDER BY title COLLATE UNICODE' % self.TABLE_CHANNELS)
with self.lock:
cur = self.cursor()
cur.execute('SELECT * FROM %s ORDER BY title COLLATE UNICODE' % self.TABLE_PODCAST)
result = []
keys = list(desc[0] for desc in cur.description)
for row in cur:
channel = dict(zip(keys, row))
result = []
keys = list(desc[0] for desc in cur.description)
for row in cur:
podcast = dict(zip(keys, row))
if url is None or url == channel['url']:
if factory is None:
result.append(channel)
else:
result.append(factory(channel, self))
if url is None or url == podcast['url']:
if factory is None:
result.append(podcast)
else:
result.append(factory(podcast, self))
cur.close()
self.lock.release()
cur.close()
return result
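
The row-to-dict conversion used in load_podcasts() (and in the episode loaders below) relies on cursor.description for the column names. A standalone sketch with an in-memory database, for illustration only:

import sqlite3

def rows_as_dicts(cursor):
    # cursor.description holds one 7-tuple per column; index 0 is the name
    keys = [desc[0] for desc in cursor.description]
    return [dict(zip(keys, row)) for row in cursor]

connection = sqlite3.connect(':memory:')
connection.execute('CREATE TABLE podcast (id INTEGER PRIMARY KEY, title TEXT, url TEXT)')
connection.execute("INSERT INTO podcast (title, url) VALUES ('Example', 'http://example.com/feed')")
cursor = connection.execute('SELECT * FROM podcast ORDER BY title')
podcasts = rows_as_dicts(cursor)
# e.g. [{'id': 1, 'title': 'Example', 'url': 'http://example.com/feed'}]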
def save_channel(self, c):
self._save_object(c, self.TABLE_CHANNELS, self.SCHEMA_CHANNELS)
def save_podcast(self, podcast):
self._save_object(podcast, self.TABLE_PODCAST, self.COLUMNS_PODCAST)
def delete_channel(self, channel):
assert channel.id is not None
def delete_podcast(self, podcast):
assert podcast.id
cur = self.cursor(lock=True)
self.log("delete_channel(%d), %s", channel.id, channel.url)
with self.lock:
cur = self.cursor()
self.log("delete_podcast(%d), %s", podcast.id, podcast.url)
cur.execute("DELETE FROM channels WHERE id = ?", (channel.id, ))
cur.execute("DELETE FROM episodes WHERE channel_id = ?", (channel.id, ))
cur.execute("DELETE FROM %s WHERE id = ?" % self.TABLE_PODCAST, (podcast.id, ))
cur.execute("DELETE FROM %s WHERE podcast_id = ?" % self.TABLE_EPISODE, (podcast.id, ))
cur.close()
# Commit changes
self.db.commit()
self.lock.release()
cur.close()
# Commit changes
self.db.commit()
def load_all_episodes(self, channel_mapping, limit=10000):
def load_all_episodes(self, podcast_mapping, limit=10000):
self.log('Loading all episodes from the database')
sql = 'SELECT * FROM %s ORDER BY pubDate DESC LIMIT ?' % (self.TABLE_EPISODES,)
sql = 'SELECT * FROM %s ORDER BY published DESC LIMIT ?' % (self.TABLE_EPISODE,)
args = (limit,)
cur = self.cursor(lock=True)
cur.execute(sql, args)
keys = [desc[0] for desc in cur.description]
id_index = keys.index('channel_id')
result = map(lambda row: channel_mapping[row[id_index]].episode_factory(dict(zip(keys, row))), cur)
cur.close()
self.lock.release()
with self.lock:
cur = self.cursor()
cur.execute(sql, args)
keys = [desc[0] for desc in cur.description]
id_index = keys.index('podcast_id')
result = map(lambda row: podcast_mapping[row[id_index]].episode_factory(dict(zip(keys, row))), cur)
cur.close()
return result
def load_episodes(self, channel, factory=lambda x: x, limit=1000, state=None):
assert channel.id is not None
def load_episodes(self, podcast, factory=lambda x: x, limit=1000, state=None):
assert podcast.id
self.log('Loading episodes for channel %d', channel.id)
self.log('Loading episodes for podcast %d', podcast.id)
if state is None:
sql = 'SELECT * FROM %s WHERE channel_id = ? ORDER BY pubDate DESC LIMIT ?' % (self.TABLE_EPISODES,)
args = (channel.id, limit)
sql = 'SELECT * FROM %s WHERE podcast_id = ? ORDER BY published DESC LIMIT ?' % (self.TABLE_EPISODE,)
args = (podcast.id, limit)
else:
sql = 'SELECT * FROM %s WHERE channel_id = ? AND state = ? ORDER BY pubDate DESC LIMIT ?' % (self.TABLE_EPISODES,)
args = (channel.id, state, limit)
sql = 'SELECT * FROM %s WHERE podcast_id = ? AND state = ? ORDER BY published DESC LIMIT ?' % (self.TABLE_EPISODE,)
args = (podcast.id, state, limit)
cur = self.cursor(lock=True)
cur.execute(sql, args)
keys = [desc[0] for desc in cur.description]
result = map(lambda row: factory(dict(zip(keys, row)), self), cur)
cur.close()
self.lock.release()
with self.lock:
cur = self.cursor()
cur.execute(sql, args)
keys = [desc[0] for desc in cur.description]
result = map(lambda row: factory(dict(zip(keys, row)), self), cur)
cur.close()
return result
def load_single_episode(self, channel, factory=lambda x: x, **kwargs):
def load_single_episode(self, podcast, factory=lambda x: x, **kwargs):
"""Load one episode with keywords
Return an episode object (created by "factory") for a
given channel. You can use keyword arguments to specify
given podcast. You can use keyword arguments to specify
the attributes that the episode object should have.
Example:
db.load_single_episode(channel, url='x')
This will search all episodes belonging to "channel"
and return the first one where the "url" column is "x".
Returns None if the episode cannot be found.
"""
assert channel.id is not None
assert podcast.id
# Inject channel_id into query to reduce search space
kwargs['channel_id'] = channel.id
# Inject podcast_id into query to reduce search space
kwargs['podcast_id'] = podcast.id
# We need to have the keys in the same order as the values, so
# we use items() and unzip the resulting list into two ordered lists
keys, args = zip(*kwargs.items())
sql = 'SELECT * FROM %s WHERE %s LIMIT 1' % (self.TABLE_EPISODES, \
sql = 'SELECT * FROM %s WHERE %s LIMIT 1' % (self.TABLE_EPISODE, \
' AND '.join('%s=?' % k for k in keys))
cur = self.cursor(lock=True)
cur.execute(sql, args)
keys = [desc[0] for desc in cur.description]
row = cur.fetchone()
if row:
result = factory(dict(zip(keys, row)), self)
else:
result = None
with self.lock:
cur = self.cursor()
cur.execute(sql, args)
keys = [desc[0] for desc in cur.description]
row = cur.fetchone()
if row:
result = factory(dict(zip(keys, row)), self)
else:
result = None
cur.close()
self.lock.release()
cur.close()
return result
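
For illustration (not from this commit), the keyword-based lookup above builds its WHERE clause as follows; column names come from trusted keyword arguments, only the values are parameterized.

def build_single_episode_query(table, **kwargs):
    # Mirrors the query construction in load_single_episode() above
    keys, args = zip(*kwargs.items())
    sql = 'SELECT * FROM %s WHERE %s LIMIT 1' % (
        table, ' AND '.join('%s = ?' % key for key in keys))
    return sql, args

# e.g. build_single_episode_query('episode', podcast_id=1, url='x') may return
# ('SELECT * FROM episode WHERE podcast_id = ? AND url = ? LIMIT 1', (1, 'x'))
# (the term order follows dict ordering of the keyword arguments)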
def load_episode(self, id):
@ -450,51 +349,34 @@ class Database(object):
This will return the data for an episode as
a dictionary, or None if it does not exist.
"""
assert id is not None
assert id
cur = self.cursor(lock=True)
cur.execute('SELECT * from %s WHERE id = ? LIMIT 1' % (self.TABLE_EPISODES,), (id,))
try:
d = dict(zip((desc[0] for desc in cur.description), cur.fetchone()))
cur.close()
self.log('Loaded episode %d from DB', id)
self.lock.release()
return d
except:
cur.close()
self.lock.release()
return None
with self.lock:
cur = self.cursor()
cur.execute('SELECT * from %s WHERE id = ? LIMIT 1' % (self.TABLE_EPISODE,), (id,))
try:
d = dict(zip((desc[0] for desc in cur.description), cur.fetchone()))
cur.close()
self.log('Loaded episode %d from DB', id)
return d
except:
cur.close()
return None
def get_channel_id_from_episode_url(self, url):
"""Return the (first) associated channel ID given an episode URL"""
assert url is not None
def get_podcast_id_from_episode_url(self, url):
"""Return the (first) associated podcast ID given an episode URL"""
assert url
return self.get('SELECT podcast_id FROM %s WHERE url = ? LIMIT 1' % (self.TABLE_EPISODE,), (url,))
cur = self.cursor(lock=True)
cur.execute('SELECT channel_id FROM %s WHERE url = ? LIMIT 1' % (self.TABLE_EPISODES,), (url,))
try:
row = cur.fetchone()
if row is not None:
self.log('Found channel ID: %d', int(row[0]), sender=self)
return int(row[0])
finally:
self.lock.release()
def save_episode(self, episode):
assert episode.podcast_id
assert episode.guid
self._save_object(episode, self.TABLE_EPISODE, self.COLUMNS_EPISODE)
return None
def save_episode(self, e):
assert e.channel_id
if not e.guid:
self.log('Refusing to save an episode without guid: %s', e)
return
self._save_object(e, self.TABLE_EPISODES, self.SCHEMA_EPISODES)
def _save_object(self, o, table, schema):
def _save_object(self, o, table, columns):
self.lock.acquire()
try:
cur = self.cursor()
columns = [name for name, typ, required, default in schema if name != 'id']
values = [getattr(o, name) for name in columns]
if o.id is None:
@ -513,175 +395,64 @@ class Database(object):
cur.close()
self.lock.release()
def save_downloaded_episode(self, episode):
assert episode.id is not None
cur = self.cursor(lock=True)
cur.execute('UPDATE episodes SET state = ?, played = ?, length = ? WHERE id = ?', \
(episode.state, episode.is_played, episode.length, episode.id))
cur.close()
self.lock.release()
def update_episode_state(self, episode):
assert episode.id is not None
cur = self.cursor(lock=True)
cur.execute('UPDATE episodes SET state = ?, played = ?, locked = ? WHERE id = ?', (episode.state, episode.is_played, episode.is_locked, episode.id))
cur.close()
self.lock.release()
with self.lock:
cur = self.cursor()
cur.execute('UPDATE %s SET state = ?, is_new = ?, archive = ? WHERE id = ?' % (self.TABLE_EPISODE,), (episode.state, episode.is_new, episode.archive, episode.id))
cur.close()
def update_channel_lock(self, channel):
assert channel.id is not None
self.log("update_channel_lock(%s, locked=%s)", channel.url, channel.channel_is_locked)
cur = self.cursor(lock=True)
cur.execute("UPDATE channels SET channel_is_locked = ? WHERE id = ?", (channel.channel_is_locked, channel.id, ))
cur.close()
self.lock.release()
def __get__(self, sql, params=None):
def get(self, sql, params=None):
"""
Returns the first cell of a query result, useful for COUNT()s.
"""
cur = self.cursor(lock=True)
with self.lock:
cur = self.cursor()
self.log("__get__(): %s", sql)
self.log("get(): %s", sql)
if params is None:
cur.execute(sql)
else:
cur.execute(sql, params)
if params is None:
cur.execute(sql)
else:
cur.execute(sql, params)
row = cur.fetchone()
cur.close()
self.lock.release()
row = cur.fetchone()
cur.close()
if row is None:
return None
else:
return row[0]
def channel_foldername_exists(self, foldername):
def podcast_download_folder_exists(self, foldername):
"""
Returns True if a foldername for a channel exists.
False otherwise.
"""
return self.__get__("SELECT id FROM channels WHERE foldername = ?", (foldername,)) is not None
return self.get("SELECT id FROM %s WHERE download_folder = ?" % self.TABLE_PODCAST, (foldername,)) is not None
def episode_filename_exists(self, filename):
"""
Returns True if a filename for an episode exists.
False otherwise.
"""
return self.__get__("SELECT id FROM episodes WHERE filename = ?", (filename,)) is not None
return self.get("SELECT id FROM %s WHERE download_filename = ?" % self.TABLE_EPISODE, (filename,)) is not None
def get_last_pubdate(self, channel):
def get_last_published(self, podcast):
"""
Look up the highest "pubDate" value for
all episodes of the given podcast.
Look up the most recent publish date of a podcast.
"""
return self.__get__('SELECT MAX(pubDate) FROM episodes WHERE channel_id = ?', (channel.id,))
return self.get('SELECT MAX(published) FROM %s WHERE podcast_id = ?' % self.TABLE_EPISODE, (podcast.id,))
def force_last_new(self, channel):
"""
Only set the most-recent episode as "new"; this
should be called when a new podcast is added.
"""
cur = self.cursor(lock=True)
cur.execute("""
UPDATE episodes
SET played = ?
WHERE channel_id = ? AND
pubDate < (SELECT MAX(pubDate)
FROM episodes
WHERE channel_id = ?)
""", (True, channel.id, channel.id))
cur.close()
self.lock.release()
def recreate_table(self, cur, table_name, fields, index_list):
log('Rename table %s', table_name, sender=self)
new_table_name = table_name + "_save"
cur.execute("ALTER TABLE %s RENAME TO %s" % (table_name, new_table_name))
#log("ALTER TABLE %s RENAME TO %s" % (table_name, new_table_name))
log('Delete existing indices', sender=self)
for column, typ in index_list:
cur.execute('DROP INDEX IF EXISTS idx_%s' % (column))
self.create_table(cur, table_name, fields)
log('Correct NULL values in the existing data', sender=self)
columns = set((column, default) for column, typ, required, default in fields if required)
for column, default in columns:
cur.execute('UPDATE %s SET %s = %s where %s IS NULL' % (new_table_name, column, default, column))
log('Copy data from table %s to table %s' % (new_table_name, table_name), sender=self)
columns = ', '.join(f[0] for f in fields)
cur.execute("INSERT INTO %(tab)s (%(col)s) SELECT %(col)s FROM %(new_tab)s" %
{'tab': table_name, 'col': columns, 'new_tab': new_table_name})
def create_table(self, cur, table_name, fields):
log('Creating table %s', table_name, sender=self)
columns = ''
for column, typ, required, default in fields:
if required:
columns += '\n %s %s NOT NULL DEFAULT %s,' % (column, typ, default)
else:
columns += '\n %s %s,' % (column, typ)
columns = columns.rstrip(',')
sql = "CREATE TABLE %s (%s)" % (table_name, columns)
cur.execute(sql)
def upgrade_table(self, table_name, fields, index_list):
"""
Creates a table or adds fields to it.
"""
cur = self.cursor(lock=True)
cur.execute("PRAGMA table_info(%s)" % table_name)
available = cur.fetchall()
if not available:
self.create_table(cur, table_name, fields)
else:
# Table info columns, as returned by SQLite
ID, NAME, TYPE, NOTNULL, DEFAULT = range(5)
exists_notnull_column = any(bool(column[NOTNULL]) for column in available)
if not exists_notnull_column:
self.recreate_table(cur, table_name, fields, index_list)
else:
existing = set(column[NAME] for column in available)
for field_name, field_type, field_required, field_default in fields:
if field_name not in existing:
log('Adding column: %s.%s (%s)', table_name, field_name, field_type, sender=self)
sql = "ALTER TABLE %s ADD COLUMN %s %s" % (table_name, field_name, field_type)
if field_required:
sql += " NOT NULL DEFAULT %s" % (field_default)
cur.execute(sql)
for column, typ in index_list:
cur.execute('CREATE %s IF NOT EXISTS idx_%s ON %s (%s)' % (typ, column, table_name, column))
if table_name == self.TABLE_EPISODES:
cur.execute('CREATE UNIQUE INDEX IF NOT EXISTS idx_guids ON %s (%s)' % (table_name, ', '.join(('channel_id', 'guid'))))
self.lock.release()
def delete_episode_by_guid(self, guid, channel_id):
def delete_episode_by_guid(self, guid, podcast_id):
"""
Deletes episodes that have a specific GUID for
a given channel. Used after feed updates for
episodes that have disappeared from the feed.
"""
cur = self.cursor(lock=True)
cur.execute('DELETE FROM episodes WHERE channel_id = ? AND guid = ?', \
(channel_id, guid))
self.lock.release()
with self.lock:
cur = self.cursor()
cur.execute('DELETE FROM %s WHERE podcast_id = ? AND guid = ?' % self.TABLE_EPISODE, \
(podcast_id, guid))

View File

@ -116,7 +116,7 @@ class DBusPodcastsProxy(dbus.service.Object):
title = safe_str(episode.title)
url = safe_str(episode.url)
description = safe_first_line(episode.description)
filename = safe_str(episode.filename)
filename = safe_str(episode.download_filename)
file_type = safe_str(episode.file_type())
is_new = (episode.state == gpodder.STATE_NORMAL and not episode.is_played)
is_downloaded = episode.was_downloaded(and_exists=True)

View File

@ -331,9 +331,9 @@ class DownloadURLOpener(urllib.FancyURLopener):
if self._auth_retry_counter > 3:
raise AuthenticationError(_('Wrong username/password'))
if self.channel.username or self.channel.password:
log( 'Authenticating as "%s" to "%s" for realm "%s".', self.channel.username, host, realm, sender = self)
return ( self.channel.username, self.channel.password )
if self.channel.auth_username or self.channel.auth_password:
log( 'Authenticating as "%s" to "%s" for realm "%s".', self.channel.auth_username, host, realm, sender = self)
return ( self.channel.auth_username, self.channel.auth_password )
return (None, None)
@ -585,7 +585,7 @@ class DownloadTask(object):
self.filename = self.__episode.local_filename(create=True)
self.tempname = self.filename + '.partial'
self.total_size = self.__episode.length
self.total_size = self.__episode.file_size
self.speed = 0.0
self.progress = 0.0
self.error_message = None
@ -724,13 +724,13 @@ class DownloadTask(object):
headers, real_url = downloader.retrieve_resume(url, \
self.tempname, reporthook=self.status_updated)
new_mimetype = headers.get('content-type', self.__episode.mimetype)
old_mimetype = self.__episode.mimetype
new_mimetype = headers.get('content-type', self.__episode.mime_type)
old_mimetype = self.__episode.mime_type
_basename, ext = os.path.splitext(self.filename)
if new_mimetype != old_mimetype or util.wrong_extension(ext):
log('Correcting mime type: %s => %s', old_mimetype, new_mimetype, sender=self)
old_extension = self.__episode.extension()
self.__episode.mimetype = new_mimetype
self.__episode.mime_type = new_mimetype
new_extension = self.__episode.extension()
# If the desired filename extension changed due to the new

View File

@ -33,15 +33,15 @@ class gPodderChannel(BuilderWidget):
self.gPodderChannel.set_title( self.channel.title)
self.entryTitle.set_text( self.channel.title)
self.labelURL.set_text(self.channel.url)
self.cbSkipFeedUpdate.set_active(not self.channel.feed_update_enabled)
self.cbSkipFeedUpdate.set_active(self.channel.pause_subscription)
self.LabelDownloadTo.set_text( self.channel.save_dir)
self.LabelWebsite.set_text( self.channel.link)
if self.channel.username:
self.FeedUsername.set_text( self.channel.username)
if self.channel.password:
self.FeedPassword.set_text( self.channel.password)
if self.channel.auth_username:
self.FeedUsername.set_text( self.channel.auth_username)
if self.channel.auth_password:
self.FeedPassword.set_text( self.channel.auth_password)
self.cover_downloader.register('cover-available', self.cover_download_finished)
self.cover_downloader.request_cover(self.channel)
@ -101,10 +101,10 @@ class gPodderChannel(BuilderWidget):
self.cover_downloader.unregister('cover-available', self.cover_download_finished)
def on_btnOK_clicked(self, widget, *args):
self.channel.feed_update_enabled = not self.cbSkipFeedUpdate.get_active()
self.channel.pause_subscription = self.cbSkipFeedUpdate.get_active()
self.channel.set_custom_title(self.entryTitle.get_text())
self.channel.username = self.FeedUsername.get_text().strip()
self.channel.password = self.FeedPassword.get_text()
self.channel.auth_username = self.FeedUsername.get_text().strip()
self.channel.auth_password = self.FeedPassword.get_text()
self.channel.save()
self.cover_downloader.reload_cover_from_disk(self.channel)

View File

@ -81,7 +81,7 @@ class gPodderEpisodeSelector(BuilderWidget):
calculate the size of an episode; set this to
None if no total size calculation should be
done (in cases where total size is useless)
(default is 'length')
(default is 'file_size')
- tooltip_attribute: (optional) The name of an attribute of
the supplied episode objects that holds
the text for the tooltips when hovering
@ -113,7 +113,7 @@ class gPodderEpisodeSelector(BuilderWidget):
self.episodes = []
if not hasattr( self, 'size_attribute'):
self.size_attribute = 'length'
self.size_attribute = 'file_size'
if not hasattr(self, 'tooltip_attribute'):
self.tooltip_attribute = 'description'

View File

@ -143,7 +143,7 @@ class gPodderEpisodeActions(BuilderWidget):
self.radio_action_mark_new.connect_proxy(mark_new_button)
self.radio_action_mark_old.connect_proxy(mark_old_button)
if self.episode.length > 0:
if self.episode.file_size > 0:
download_button.set_title(self.action_download.props.label)
download_button.set_value(self.episode.get_filesize_string())

View File

@ -96,9 +96,9 @@ class gPodderEpisodes(BuilderWidget):
self.main_window.set_app_menu(appmenu)
def on_pause_subscription_button_toggled(self, widget):
new_value = not widget.get_active()
if new_value != self.channel.feed_update_enabled:
self.channel.feed_update_enabled = new_value
new_value = widget.get_active()
if new_value != self.channel.pause_subscription:
self.channel.pause_subscription = new_value
self.cover_downloader.reload_cover_from_disk(self.channel)
self.channel.save()
self.update_podcast_list_model(urls=[self.channel.url])
@ -120,10 +120,10 @@ class gPodderEpisodes(BuilderWidget):
def on_login_button_clicked(self, widget):
accept, auth_data = self.show_login_dialog(_('Login to %s') % \
self.channel.title, '', \
self.channel.username, \
self.channel.password)
self.channel.auth_username, \
self.channel.auth_password)
if accept:
self.channel.username, self.channel.password = auth_data
self.channel.auth_username, self.channel.auth_password = auth_data
self.channel.save()
def on_website_button_clicked(self, widget):
@ -227,8 +227,7 @@ class gPodderEpisodes(BuilderWidget):
else:
self.pause_sub_button.show()
self.pause_sub_button.set_active(\
not self.channel.feed_update_enabled)
self.pause_sub_button.set_active(self.channel.pause_subscription)
self.main_window.set_title(self.channel.title)
self.main_window.show()

View File

@ -84,7 +84,7 @@ class gPodderEpisodeSelector(BuilderWidget):
calculate the size of an episode; set this to
None if no total size calculation should be
done (in cases where total size is useless)
(default is 'length')
(default is 'file_size')
- tooltip_attribute: (optional) The name of an attribute of
the supplied episode objects that holds
the text for the tooltips when hovering
@ -116,7 +116,7 @@ class gPodderEpisodeSelector(BuilderWidget):
self.episodes = []
if not hasattr( self, 'size_attribute'):
self.size_attribute = 'length'
self.size_attribute = 'file_size'
if not hasattr(self, 'tooltip_attribute'):
self.tooltip_attribute = 'description'

View File

@ -161,9 +161,9 @@ class EpisodeListModel(gtk.GenericTreeModel):
(gpodder.STATE_DOWNLOADED, gpodder.STATE_NORMAL))) or \
downloading(episode)
elif column == self.C_FILESIZE:
return episode.length
return episode.file_size
elif column == self.C_PUBLISHED:
return episode.pubDate
return episode.published
elif column == self.C_TIME:
return episode.get_play_info_string()
elif column == self.C_TIME_VISIBLE:
@ -267,8 +267,8 @@ class EpisodeListModel(gtk.GenericTreeModel):
def _format_filesize(self, episode):
if episode.length > 0:
return util.format_filesize(episode.length, 1)
if episode.file_size > 0:
return util.format_filesize(episode.file_size, 1)
else:
return None
@ -594,7 +594,7 @@ class PodcastListModel(model.PodcastListModel):
def _format_description(self, channel, total, deleted, \
new, downloaded, unplayed):
title_markup = cgi.escape(channel.title)
if not channel.feed_update_enabled:
if channel.pause_subscription:
disabled_text = cgi.escape(_('Subscription paused'))
if new:
return self._active_markup % (title_markup, disabled_text)

View File

@ -93,13 +93,13 @@ class gPodderChannel(BuilderWidget):
title = _('Edit podcast authentication')
message = _('Please enter your username and password.')
success, auth_tokens = self.show_login_dialog(title, message, \
username=self.channel.username, password=self.channel.password)
username=self.channel.auth_username, password=self.channel.auth_password)
if success:
username, password = auth_tokens
if self.channel.username != username or \
self.channel.password != password:
self.channel.username = username
self.channel.password = password
if self.channel.auth_username != username or \
self.channel.auth_password != password:
self.channel.auth_username = username
self.channel.auth_password = password
self.channel.save()
if not username and not password:
self.show_message(_('Username and password removed.'), \

View File

@ -80,7 +80,7 @@ class gPodderEpisodeSelector(BuilderWidget):
calculate the size of an episode; set this to
None if no total size calculation should be
done (in cases where total size is useless)
(default is 'length')
(default is 'file_size')
- tooltip_attribute: (optional) The name of an attribute of
the supplied episode objects that holds
the text for the tooltips when hovering
@ -112,7 +112,7 @@ class gPodderEpisodeSelector(BuilderWidget):
self.episodes = []
if not hasattr( self, 'size_attribute'):
self.size_attribute = 'length'
self.size_attribute = 'file_size'
if not hasattr(self, 'tooltip_attribute'):
self.tooltip_attribute = 'description'

View File

@ -102,8 +102,8 @@ class EpisodeListModel(gtk.ListStore):
def _format_filesize(self, episode):
if episode.length > 0:
return util.format_filesize(episode.length, 1)
if episode.file_size > 0:
return util.format_filesize(episode.file_size, 1)
else:
return None
@ -219,8 +219,8 @@ class EpisodeListModel(gtk.ListStore):
True, \
True, \
True, \
episode.length, \
episode.pubDate, \
episode.file_size, \
episode.published, \
episode.get_play_info_string(), \
episode.total_time and not episode.current_position, \
episode.total_time and episode.current_position, \
@ -487,7 +487,7 @@ class PodcastChannelProxy(object):
self._save_dir_size_set = False
self.save_dir_size = 0L
self.cover_file = os.path.join(gpodder.images_folder, 'podcast-all.png')
self.feed_update_enabled = True
self.pause_subscription = False
def __getattribute__(self, name):
try:
@ -669,7 +669,7 @@ class PodcastListModel(gtk.ListStore):
pixbuf = self._cover_downloader.get_cover(channel, avoid_downloading=True)
pixbuf_overlay = self._resize_pixbuf(channel.url, pixbuf)
if add_overlay and not channel.feed_update_enabled:
if add_overlay and channel.pause_subscription:
pixbuf_overlay = self._overlay_pixbuf(pixbuf_overlay, self.ICON_DISABLED)
pixbuf_overlay.saturate_and_pixelate(pixbuf_overlay, 0.0, False)
@ -684,7 +684,7 @@ class PodcastListModel(gtk.ListStore):
def _format_description(self, channel, total, deleted, \
new, downloaded, unplayed):
title_markup = xml.sax.saxutils.escape(channel.title)
if channel.feed_update_enabled:
if not channel.pause_subscription:
description_markup = xml.sax.saxutils.escape(util.get_first_line(channel.description) or ' ')
else:
description_markup = xml.sax.saxutils.escape(_('Subscription paused'))
@ -797,7 +797,7 @@ class PodcastListModel(gtk.ListStore):
def add_cover_by_channel(self, channel, pixbuf):
# Resize and add the new cover image
pixbuf = self._resize_pixbuf(channel.url, pixbuf)
if not channel.feed_update_enabled:
if channel.pause_subscription:
pixbuf = self._overlay_pixbuf(pixbuf, self.ICON_DISABLED)
pixbuf.saturate_and_pixelate(pixbuf, 0.0, False)

View File

@ -156,7 +156,7 @@ class CoverDownloader(ObservableService):
if not os.path.exists(channel.cover_file):
if url is None:
url = channel.image
url = channel.cover_url
new_url = youtube.get_real_cover(channel.url)
if new_url is not None:

View File

@ -577,13 +577,13 @@ class gPodder(BuilderWidget, dbus.service.Object):
if len(file_parts) == 2:
dir_name, filename = file_parts
channels = [c for c in self.channels if c.foldername == dir_name]
channels = [c for c in self.channels if c.download_folder == dir_name]
if len(channels) == 1:
channel = channels[0]
return channel.get_episode_by_filename(filename)
else:
# Possibly remote file - search the database for a podcast
channel_id = self.db.get_channel_id_from_episode_url(uri)
channel_id = self.db.get_podcast_id_from_episode_url(uri)
if channel_id is not None:
channels = [c for c in self.channels if c.id == channel_id]
@ -1776,7 +1776,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
menu.append(gtk.SeparatorMenuItem())
item = gtk.CheckMenuItem(_('Archive'))
item.set_active(self.active_channel.channel_is_locked)
item.set_active(self.active_channel.auto_archive_episodes)
item.connect('activate', self.on_channel_toggle_lock_activate)
menu.append(self.set_finger_friendly(item))
@ -2056,14 +2056,6 @@ class gPodder(BuilderWidget, dbus.service.Object):
for tempfile in temporary_files:
util.delete_file(tempfile)
# Clean up empty download folders and abandoned download folders
download_dirs = glob.glob(os.path.join(gpodder.downloads, '*'))
for ddir in download_dirs:
if os.path.isdir(ddir) and False: # FIXME not db.channel_foldername_exists(os.path.basename(ddir)):
globr = glob.glob(os.path.join(ddir, '*'))
if len(globr) == 0 or (len(globr) == 1 and globr[0].endswith('/cover')):
log('Stale download directory found: %s', os.path.basename(ddir), sender=self)
shutil.rmtree(ddir, ignore_errors=True)
def streaming_possible(self):
if gpodder.ui.desktop:
@ -2618,10 +2610,10 @@ class gPodder(BuilderWidget, dbus.service.Object):
except ValueError, ve:
username, password = (None, None)
if username is not None and channel.username is None and \
password is not None and channel.password is None:
channel.username = username
channel.password = password
if username is not None and channel.auth_username is None and \
password is not None and channel.auth_password is None:
channel.auth_username = username
channel.auth_password = password
channel.save()
self._update_cover(channel)
@ -2916,7 +2908,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
if channels is None:
# Only update podcasts for which updates are enabled
channels = [c for c in self.channels if c.feed_update_enabled]
channels = [c for c in self.channels if not c.pause_subscription]
if gpodder.ui.fremantle:
hildon.hildon_gtk_window_set_progress_indicator(self.main_window, True)
@ -3121,8 +3113,8 @@ class gPodder(BuilderWidget, dbus.service.Object):
else:
columns = (
('title_markup', None, None, _('Episode')),
('filesize_prop', 'length', gobject.TYPE_INT, _('Size')),
('pubdate_prop', 'pubDate', gobject.TYPE_INT, _('Released')),
('filesize_prop', 'file_size', gobject.TYPE_INT, _('Size')),
('pubdate_prop', 'published', gobject.TYPE_INT, _('Released')),
('played_prop', None, None, _('Status')),
('age_prop', 'age_int_prop', gobject.TYPE_INT, _('Downloaded')),
)
@ -3195,11 +3187,11 @@ class gPodder(BuilderWidget, dbus.service.Object):
if self.active_channel is None:
return
self.active_channel.channel_is_locked = not self.active_channel.channel_is_locked
self.active_channel.update_channel_lock()
self.active_channel.auto_archive_episodes = not self.active_channel.auto_archive_episodes
self.active_channel.save()
for episode in self.active_channel.get_all_episodes():
episode.mark(is_locked=self.active_channel.channel_is_locked)
episode.mark(is_locked=self.active_channel.auto_archive_episodes)
self.update_podcast_list_model(selected=True)
self.update_episode_list_icons(all=True)
@ -3302,8 +3294,8 @@ class gPodder(BuilderWidget, dbus.service.Object):
else:
columns = (
('title_markup', None, None, _('Episode')),
('filesize_prop', 'length', gobject.TYPE_INT, _('Size')),
('pubdate_prop', 'pubDate', gobject.TYPE_INT, _('Released')),
('filesize_prop', 'file_size', gobject.TYPE_INT, _('Size')),
('pubdate_prop', 'published', gobject.TYPE_INT, _('Released')),
)
show_notification = False

View File

@ -59,8 +59,8 @@ class gPodderFetcher(feedcore.Fetcher):
feedcore.Fetcher.__init__(self, gpodder.user_agent)
def fetch_channel(self, channel):
etag = channel.etag
modified = feedparser._parse_date(channel.last_modified)
etag = channel.http_etag
modified = feedparser._parse_date(channel.http_last_modified)
# If we have a username or password, rebuild the url with them included
# Note: using an HTTPBasicAuthHandler would be a pain because we need to
# know the realm. It can be done, but I think this method works, too
@ -121,7 +121,7 @@ class PodcastChannel(PodcastModelObject):
@classmethod
def load_from_db(cls, db):
return db.load_channels(factory=cls.create_from_dict)
return db.load_podcasts(factory=cls.create_from_dict)
@classmethod
def load(cls, db, url, create=True, authentication_tokens=None,\
@ -130,19 +130,18 @@ class PodcastChannel(PodcastModelObject):
if isinstance(url, unicode):
url = url.encode('utf-8')
tmp = db.load_channels(factory=cls.create_from_dict, url=url)
tmp = db.load_podcasts(factory=cls.create_from_dict, url=url)
if len(tmp):
return tmp[0]
elif create:
tmp = PodcastChannel(db)
tmp.url = url
if authentication_tokens is not None:
tmp.username = authentication_tokens[0]
tmp.password = authentication_tokens[1]
tmp.auth_username = authentication_tokens[0]
tmp.auth_password = authentication_tokens[1]
tmp.update(max_episodes, mimetype_prefs)
tmp.save()
db.force_last_new(tmp)
return tmp
def episode_factory(self, d, db__parameter_is_unused=None):
@ -159,8 +158,8 @@ class PodcastChannel(PodcastModelObject):
self.title = custom_feed.get_title()
self.link = custom_feed.get_link()
self.description = custom_feed.get_description()
self.image = custom_feed.get_image()
self.pubDate = time.time()
self.cover_url = custom_feed.get_image()
self.published = int(time.time())
self.save()
guids = [episode.guid for episode in self.get_all_episodes()]
@ -187,19 +186,19 @@ class PodcastChannel(PodcastModelObject):
# End YouTube-specific title FIX
try:
self.pubDate = rfc822.mktime_tz(feed.feed.get('updated_parsed', None)+(0,))
self.published = int(rfc822.mktime_tz(feed.feed.get('updated_parsed', None)+(0,)))
except:
self.pubDate = time.time()
self.published = int(time.time())
if hasattr(feed.feed, 'image'):
for attribute in ('href', 'url'):
new_value = getattr(feed.feed.image, attribute, None)
if new_value is not None:
log('Found cover art in %s: %s', attribute, new_value)
self.image = new_value
self.cover_url = new_value
if hasattr(feed.feed, 'icon'):
self.image = feed.feed.icon
self.cover_url = feed.feed.icon
self.save()
@ -228,8 +227,8 @@ class PodcastChannel(PodcastModelObject):
# GUID-based existing episode list
existing_guids = dict((e.guid, e) for e in existing)
# Get most recent pubDate of all episodes
last_pubdate = self.db.get_last_pubdate(self) or 0
# Get most recent published of all episodes
last_published = self.db.get_last_published(self) or 0
# Search all entries for new episodes
for entry in entries:
@ -262,9 +261,9 @@ class PodcastChannel(PodcastModelObject):
# Workaround for bug 340: If the episode has been
# published earlier than one week before the most
# recent existing episode, do not mark it as new.
if episode.pubDate < last_pubdate - self.SECONDS_PER_WEEK:
if episode.published < last_published - self.SECONDS_PER_WEEK:
log('Episode with old date: %s', episode.title, sender=self)
episode.is_played = True
episode.is_new = False
episode.save()
@ -286,13 +285,9 @@ class PodcastChannel(PodcastModelObject):
# in certain situations (see bug #340).
self.db.purge(max_episodes, self.id)
def update_channel_lock(self):
self.db.update_channel_lock(self)
def _update_etag_modified(self, feed):
self.updated_timestamp = time.time()
self.etag = feed.headers.get('etag', self.etag)
self.last_modified = feed.headers.get('last-modified', self.last_modified)
self.http_etag = feed.headers.get('etag', self.http_etag)
self.http_last_modified = feed.headers.get('last-modified', self.http_last_modified)
def update(self, max_episodes=0, mimetype_prefs=''):
try:
@ -337,24 +332,24 @@ class PodcastChannel(PodcastModelObject):
self.db.commit()
def delete(self):
self.db.delete_channel(self)
self.db.delete_podcast(self)
def save(self):
if gpodder.user_hooks is not None:
gpodder.user_hooks.on_podcast_save(self)
if self.foldername is None:
# get_save_dir() finds a unique value for foldername
if self.download_folder is None:
# get_save_dir() finds a unique value for download_folder
self.get_save_dir()
self.db.save_channel(self)
self.db.save_podcast(self)
def get_statistics(self):
if self.id is None:
return (0, 0, 0, 0, 0)
else:
return self.db.get_channel_count(int(self.id))
return self.db.get_podcast_statistics(self.id)
def authenticate_url(self, url):
return util.url_add_authentication(url, self.username, self.password)
return util.url_add_authentication(url, self.auth_username, self.auth_password)
def __init__(self, db):
self.db = db
@ -363,28 +358,27 @@ class PodcastChannel(PodcastModelObject):
self.title = ''
self.link = ''
self.description = ''
self.image = None
self.pubDate = 0
self.cover_url = None
self.published = 0
self.parse_error = None
self.foldername = None
self.auto_foldername = 1 # automatically generated foldername
# if set, this overrides the channel-provided title
self.override_title = ''
self.username = ''
self.password = ''
self.auth_username = ''
self.auth_password = ''
self.last_modified = None
self.etag = None
self.http_last_modified = None
self.http_etag = None
self.auto_archive_episodes = False
self.download_folder = None
self.pause_subscription = False
self.save_dir_size = 0
self.__save_dir_size_set = False
self.channel_is_locked = False
def _get_cover_url(self):
return self.cover_url
self.updated_timestamp = 0
self.feed_update_enabled = True
image = property(_get_cover_url)
def request_save_dir_size(self):
if not self.__save_dir_size_set:
@ -395,9 +389,7 @@ class PodcastChannel(PodcastModelObject):
self.save_dir_size = util.calculate_size(self.save_dir)
def get_title( self):
if self.override_title:
return self.override_title
elif not self.__title.strip():
if not self.__title.strip():
return self.url
else:
return self.__title
@ -410,24 +402,20 @@ class PodcastChannel(PodcastModelObject):
def set_custom_title( self, custom_title):
custom_title = custom_title.strip()
# if the custom title is the same as the one we have
if custom_title == self.override_title:
return
# if custom title is the same as channel title and we didn't have a custom title
if custom_title == self.__title and self.override_title == '':
if custom_title == self.title:
return
# make sure self.foldername is initialized
# make sure self.download_folder is initialized
self.get_save_dir()
# rename folder if custom_title looks sane
new_folder_name = self.find_unique_folder_name(custom_title)
if len(new_folder_name) > 0 and new_folder_name != self.foldername:
log('Changing foldername based on custom title: %s', custom_title, sender=self)
if len(new_folder_name) > 0 and new_folder_name != self.download_folder:
log('Changing download_folder based on custom title: %s', custom_title, sender=self)
new_folder = os.path.join(gpodder.downloads, new_folder_name)
old_folder = os.path.join(gpodder.downloads, self.foldername)
old_folder = os.path.join(gpodder.downloads, self.download_folder)
if os.path.exists(old_folder):
if not os.path.exists(new_folder):
# Old folder exists, new folder does not -> simply rename
@ -440,13 +428,10 @@ class PodcastChannel(PodcastModelObject):
shutil.move(file, new_folder)
log('Removing %s', old_folder, sender=self)
shutil.rmtree(old_folder, ignore_errors=True)
self.foldername = new_folder_name
self.download_folder = new_folder_name
self.save()
if custom_title != self.__title:
self.override_title = custom_title
else:
self.override_title = ''
self.title = custom_title
def get_downloaded_episodes(self):
return self.db.load_episodes(self, factory=self.episode_factory, state=gpodder.STATE_DOWNLOADED)
@ -495,17 +480,17 @@ class PodcastChannel(PodcastModelObject):
def get_all_episodes(self):
return self.db.load_episodes(self, factory=self.episode_factory)
def find_unique_folder_name(self, foldername):
def find_unique_folder_name(self, download_folder):
# Remove trailing dots to avoid errors on Windows (bug 600)
foldername = foldername.strip().rstrip('.')
download_folder = download_folder.strip().rstrip('.')
current_try = util.sanitize_filename(foldername, \
current_try = util.sanitize_filename(download_folder, \
self.MAX_FOLDERNAME_LENGTH)
next_try_id = 2
while True:
if self.db.channel_foldername_exists(current_try):
current_try = '%s (%d)' % (foldername, next_try_id)
if self.db.podcast_download_folder_exists(current_try):
current_try = '%s (%d)' % (download_folder, next_try_id)
next_try_id += 1
else:
return current_try
@ -513,7 +498,7 @@ class PodcastChannel(PodcastModelObject):
def get_save_dir(self):
urldigest = hashlib.md5(self.url).hexdigest()
sanitizedurl = util.sanitize_filename(self.url, self.MAX_FOLDERNAME_LENGTH)
if self.foldername is None or (self.auto_foldername and (self.foldername == urldigest or self.foldername.startswith(sanitizedurl))):
if self.download_folder is None:
# we must change the folder name, because it has not been set manually
fn_template = util.sanitize_filename(self.title, self.MAX_FOLDERNAME_LENGTH)
@ -528,18 +513,18 @@ class PodcastChannel(PodcastModelObject):
fn_template = urldigest # no need for sanitize_filename here
# Find a unique folder name for this podcast
wanted_foldername = self.find_unique_folder_name(fn_template)
wanted_download_folder = self.find_unique_folder_name(fn_template)
# if the foldername has not been set, check if the (old) md5 filename exists
if self.foldername is None and os.path.exists(os.path.join(gpodder.downloads, urldigest)):
# if the download_folder has not been set, check if the (old) md5 filename exists
if self.download_folder is None and os.path.exists(os.path.join(gpodder.downloads, urldigest)):
log('Found pre-0.15.0 download folder for %s: %s', self.title, urldigest, sender=self)
self.foldername = urldigest
self.download_folder = urldigest
# we have a valid, new folder name in "current_try" -> use that!
if self.foldername is not None and wanted_foldername != self.foldername:
if self.download_folder is not None and wanted_download_folder != self.download_folder:
# there might be an old download folder crawling around - move it!
new_folder_name = os.path.join(gpodder.downloads, wanted_foldername)
old_folder_name = os.path.join(gpodder.downloads, self.foldername)
new_folder_name = os.path.join(gpodder.downloads, wanted_download_folder)
old_folder_name = os.path.join(gpodder.downloads, self.download_folder)
if os.path.exists(old_folder_name):
if not os.path.exists(new_folder_name):
# Old folder exists, new folder does not -> simply rename
@ -552,11 +537,11 @@ class PodcastChannel(PodcastModelObject):
shutil.move(file, new_folder_name)
log('Removing %s', old_folder_name, sender=self)
shutil.rmtree(old_folder_name, ignore_errors=True)
log('Updating foldername of %s to "%s".', self.url, wanted_foldername, sender=self)
self.foldername = wanted_foldername
log('Updating download_folder of %s to "%s".', self.url, wanted_download_folder, sender=self)
self.download_folder = wanted_download_folder
self.save()
save_dir = os.path.join(gpodder.downloads, self.foldername)
save_dir = os.path.join(gpodder.downloads, self.download_folder)
# Create save_dir if it does not yet exist
if not util.make_directory( save_dir):
@ -600,32 +585,22 @@ class PodcastEpisode(PodcastModelObject):
"""holds data for one object in a channel"""
MAX_FILENAME_LENGTH = 200
def _get_played(self):
return self.is_played
def _get_is_played(self):
return not self.is_new
def _set_played(self, played):
self.is_played = played
def _set_is_played(self, is_played):
self.is_new = not is_played
# Alias "is_played" to "played" for DB column mapping
played = property(fget=_get_played, fset=_set_played)
is_played = property(fget=_get_is_played, fset=_set_is_played)
def _get_locked(self):
return self.is_locked
def _set_locked(self, locked):
self.is_locked = locked
# Alias "is_locked" to "locked" for DB column mapping
locked = property(fget=_get_locked, fset=_set_locked)
def _get_channel_id(self):
def _get_podcast_id(self):
return self.channel.id
def _set_channel_id(self, channel_id):
assert self.channel.id == channel_id
def _set_podcast_id(self, podcast_id):
assert self.channel.id == podcast_id
# Accessor for the "channel_id" DB column
channel_id = property(fget=_get_channel_id, fset=_set_channel_id)
# Accessor for the "podcast_id" DB column
podcast_id = property(fget=_get_podcast_id, fset=_set_podcast_id)
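
A minimal sketch (assumed, not part of the commit) of the aliasing idea shown above: legacy attribute names stay usable as properties computed from the new columns.

class EpisodeAliasSketch(object):
    # New schema stores "is_new"; the old "is_played" name is kept as a property.
    def __init__(self):
        self.is_new = True

    def _get_is_played(self):
        return not self.is_new

    def _set_is_played(self, is_played):
        self.is_new = not is_played

    is_played = property(fget=_get_is_played, fset=_set_is_played)

# e.g.:
# episode = EpisodeAliasSketch()
# episode.is_played = True   # sets episode.is_new to False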
@staticmethod
def sort_by_pubdate(episodes, reverse=False):
@ -633,7 +608,7 @@ class PodcastEpisode(PodcastModelObject):
Returns an iterable, sorted sequence of the episodes
"""
key_pubdate = lambda e: e.pubDate
key_pubdate = lambda e: e.published
return sorted(episodes, key=key_pubdate, reverse=reverse)
def reload_from_db(self):
@ -678,7 +653,7 @@ class PodcastEpisode(PodcastModelObject):
episode.guid = entry.get('id', '')
if entry.get('updated_parsed', None):
episode.pubDate = rfc822.mktime_tz(entry.updated_parsed+(0,))
episode.published = rfc822.mktime_tz(entry.updated_parsed+(0,))
enclosures = entry.get('enclosures', ())
audio_available = any(e.get('type', '').startswith('audio/') \
@ -705,17 +680,17 @@ class PodcastEpisode(PodcastModelObject):
# Enclosures
for e in sorted(enclosures, key=calculate_preference_value):
episode.mimetype = e.get('type', 'application/octet-stream')
if episode.mimetype == '':
episode.mime_type = e.get('type', 'application/octet-stream')
if episode.mime_type == '':
# See Maemo bug 10036
log('Fixing empty mimetype in ugly feed', sender=episode)
episode.mimetype = 'application/octet-stream'
episode.mime_type = 'application/octet-stream'
if '/' not in episode.mimetype:
if '/' not in episode.mime_type:
continue
# Skip images in feeds if audio or video is available (bug 979)
if episode.mimetype.startswith('image/') and \
if episode.mime_type.startswith('image/') and \
(audio_available or video_available):
continue
@ -724,16 +699,16 @@ class PodcastEpisode(PodcastModelObject):
continue
try:
episode.length = int(e.length) or -1
episode.file_size = int(e.length) or -1
except:
episode.length = -1
episode.file_size = -1
return episode
# Media RSS content
for m in entry.get('media_content', ()):
episode.mimetype = m.get('type', 'application/octet-stream')
if '/' not in episode.mimetype:
episode.mime_type = m.get('type', 'application/octet-stream')
if '/' not in episode.mime_type:
continue
episode.url = util.normalize_feed_url(m.get('url', ''))
@ -741,9 +716,9 @@ class PodcastEpisode(PodcastModelObject):
continue
try:
episode.length = int(m.fileSize) or -1
episode.file_size = int(m.fileSize) or -1
except:
episode.length = -1
episode.file_size = -1
return episode
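The two hunks above rename mimetype to mime_type and length to file_size in the enclosure and Media RSS handling. A hedged sketch of that selection logic under the new names; the helper name and returned dict layout are assumptions, not gPodder's API:

def pick_enclosure(enclosures, audio_available=False, video_available=False):
    """Return the first usable enclosure as a dict, or None."""
    for e in enclosures:
        mime_type = e.get('type') or 'application/octet-stream'
        if '/' not in mime_type:
            continue
        # Skip images when real audio/video is available
        if mime_type.startswith('image/') and (audio_available or video_available):
            continue
        try:
            file_size = int(e.get('length')) or -1
        except (TypeError, ValueError):
            file_size = -1
        return {'url': e.get('href'),
                'mime_type': mime_type,
                'file_size': file_size}
    return None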
@ -783,22 +758,18 @@ class PodcastEpisode(PodcastModelObject):
self.id = None
self.url = ''
self.title = ''
self.length = 0
self.mimetype = 'application/octet-stream'
self.file_size = 0
self.mime_type = 'application/octet-stream'
self.guid = ''
self.description = ''
self.link = ''
self.channel = channel
self.pubDate = 0
self.filename = None
self.auto_filename = 1 # automatically generated filename
self.published = 0
self.download_filename = None
self.state = gpodder.STATE_NORMAL
self.is_played = False
# Initialize the "is_locked" property
self._is_locked = False
self.is_locked = channel.channel_is_locked
self.is_new = True
self.archive = channel.auto_archive_episodes
# Time attributes
self.total_time = 0
@ -806,10 +777,10 @@ class PodcastEpisode(PodcastModelObject):
self.current_position_updated = 0
def get_is_locked(self):
return self._is_locked
return self.archive
def set_is_locked(self, is_locked):
self._is_locked = bool(is_locked)
self.archive = bool(is_locked)
is_locked = property(fget=get_is_locked, fset=set_is_locked)
@ -822,8 +793,8 @@ class PodcastEpisode(PodcastModelObject):
def on_downloaded(self, filename):
self.state = gpodder.STATE_DOWNLOADED
self.is_played = False
self.length = os.path.getsize(filename)
self.is_new = True
self.file_size = os.path.getsize(filename)
if not self.total_time:
try:
@ -833,15 +804,11 @@ class PodcastEpisode(PodcastModelObject):
log('Detected media length: %d seconds', length, \
sender=self)
self.total_time = length
self.db.save_episode(self)
self.db.commit()
return
except Exception, e:
log('Error while detecting media length: %s', str(e), \
sender=self)
self.db.save_downloaded_episode(self)
self.db.commit()
self.save()
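The on_downloaded hunk above now stamps the renamed file_size/is_new fields and persists through save() instead of the old save_downloaded_episode/commit pair. A rough sketch of that flow, assuming a detect_length(filename) helper, which is my placeholder and not part of the code shown here:

import os

def on_downloaded_sketch(episode, filename, detect_length=None):
    episode.state = 'downloaded'        # placeholder for gpodder.STATE_DOWNLOADED
    episode.is_new = True
    episode.file_size = os.path.getsize(filename)
    if not episode.total_time and detect_length is not None:
        try:
            episode.total_time = detect_length(filename)
        except Exception as exc:
            print('Error while detecting media length: %s' % exc)
    episode.save()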
def set_state(self, state):
self.state = state
@ -851,7 +818,7 @@ class PodcastEpisode(PodcastModelObject):
if state is not None:
self.state = state
if is_played is not None:
self.is_played = is_played
self.is_new = not is_played
if is_locked is not None:
self.is_locked = is_locked
self.db.update_episode_state(self)
@ -863,7 +830,7 @@ class PodcastEpisode(PodcastModelObject):
@property
def maemo_markup(self):
if self.length > 0:
if self.file_size > 0:
length_str = '%s; ' % self.filesize_prop
else:
length_str = ''
@ -878,7 +845,7 @@ class PodcastEpisode(PodcastModelObject):
def maemo_remove_markup(self):
if self.total_time and self.current_position:
played_string = self.get_play_info_string()
elif self.is_played:
elif not self.is_new:
played_string = _('played')
else:
played_string = _('unplayed')
@ -927,7 +894,7 @@ class PodcastEpisode(PodcastModelObject):
next_try_id = 2
lookup_url = None
if self.filename == current_try and current_try is not None:
if self.download_filename == current_try and current_try is not None:
# We already have this filename - good!
return current_try
@ -972,27 +939,19 @@ class PodcastEpisode(PodcastModelObject):
# have to know md5 filenames if they are downloaded already
urldigest = hashlib.md5(self.url).hexdigest()
if not create and self.filename is None:
urldigest_filename = os.path.join(self.channel.save_dir, urldigest+ext)
if os.path.exists(urldigest_filename):
# The file exists, so set it up in our database
log('Recovering pre-0.15.0 file: %s', urldigest_filename, sender=self)
self.filename = urldigest+ext
self.auto_filename = 1
self.save()
return urldigest_filename
if not create and self.download_filename is None:
return None
# We only want to check if the file exists, so don't try to
# rename the file, even if it would be reasonable. See also:
# http://bugs.gpodder.org/attachment.cgi?id=236
if check_only:
if self.filename is None:
if self.download_filename is None:
return None
else:
return os.path.join(self.channel.save_dir, self.filename)
return os.path.join(self.channel.save_dir, self.download_filename)
if self.filename is None or force_update or (self.auto_filename and self.filename == urldigest+ext):
if self.download_filename is None or force_update:
# Try to find a new filename for the current file
if template is not None:
# If template is specified, trust the template's extension
@ -1029,15 +988,15 @@ class PodcastEpisode(PodcastModelObject):
wanted_filename = self.find_unique_file_name(self.url, fn_template, ext)
# We populate the filename field the first time - does the old file still exist?
if self.filename is None and os.path.exists(os.path.join(self.channel.save_dir, urldigest+ext)):
if self.download_filename is None and os.path.exists(os.path.join(self.channel.save_dir, urldigest+ext)):
log('Found pre-0.15.0 downloaded file: %s', urldigest, sender=self)
self.filename = urldigest+ext
self.download_filename = urldigest+ext
# The old file exists, but we now want a different filename
if self.filename is not None and wanted_filename != self.filename:
if self.download_filename is not None and wanted_filename != self.download_filename:
# there might be an old download folder crawling around - move it!
new_file_name = os.path.join(self.channel.save_dir, wanted_filename)
old_file_name = os.path.join(self.channel.save_dir, self.filename)
old_file_name = os.path.join(self.channel.save_dir, self.download_filename)
if os.path.exists(old_file_name) and not os.path.exists(new_file_name):
log('Renaming %s => %s', old_file_name, new_file_name, sender=self)
os.rename(old_file_name, new_file_name)
@ -1048,20 +1007,20 @@ class PodcastEpisode(PodcastModelObject):
else:
log('Warning: %s exists or %s does not.', new_file_name, old_file_name, sender=self)
log('Updating filename of %s to "%s".', self.url, wanted_filename, sender=self)
elif self.filename is None:
elif self.download_filename is None:
log('Setting filename to "%s".', wanted_filename, sender=self)
else:
log('Should update filename. Stays the same (%s). Good!', \
wanted_filename, sender=self)
self.filename = wanted_filename
self.download_filename = wanted_filename
self.save()
self.db.commit()
return os.path.join(self.channel.save_dir, self.filename)
return os.path.join(self.channel.save_dir, self.download_filename)
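The large hunk above drops the separate pre-0.15.0 recovery branch and folds it into the regular filename update: if no download_filename is known yet but a legacy md5(url)-named file exists, it is adopted and then renamed to the preferred filename. A simplified sketch of that migration; the function and parameter names here are mine, not gPodder's:

import hashlib
import os

def migrate_download_filename(save_dir, url, ext, download_filename, wanted_filename):
    """Return the filename to use, renaming a legacy md5-named file if needed."""
    urldigest = hashlib.md5(url.encode('utf-8')).hexdigest()
    # Adopt a pre-0.15.0 download (named md5(url)+ext) if nothing is set yet
    if download_filename is None and os.path.exists(os.path.join(save_dir, urldigest + ext)):
        download_filename = urldigest + ext
    # Rename the existing file if a different name is now preferred
    if download_filename is not None and download_filename != wanted_filename:
        old_path = os.path.join(save_dir, download_filename)
        new_path = os.path.join(save_dir, wanted_filename)
        if os.path.exists(old_path) and not os.path.exists(new_path):
            os.rename(old_path, new_path)
    return wanted_filename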
def set_mimetype(self, mimetype, commit=False):
"""Sets the mimetype for this episode"""
self.mimetype = mimetype
self.mime_type = mimetype
if commit:
self.db.commit()
@ -1073,7 +1032,7 @@ class PodcastEpisode(PodcastModelObject):
filename, ext = os.path.splitext(filename)
# if we can't detect the extension from the url fallback on the mimetype
if ext == '' or util.file_type_by_extension(ext) is None:
ext = util.extension_from_mimetype(self.mimetype)
ext = util.extension_from_mimetype(self.mime_type)
return ext
def check_is_new(self, downloading=lambda e: False):
@ -1084,16 +1043,16 @@ class PodcastEpisode(PodcastModelObject):
being downloaded at the moment.
"""
return self.state == gpodder.STATE_NORMAL and \
not self.is_played and \
self.is_new and \
not downloading(self)
def mark_new(self):
self.state = gpodder.STATE_NORMAL
self.is_played = False
self.is_new = True
self.db.update_episode_state(self)
def mark_old(self):
self.is_played = True
self.is_new = False
self.db.update_episode_state(self)
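check_is_new, mark_new and mark_old above now operate on the inverted is_new flag instead of is_played. A compact sketch of the same logic, with an illustrative stand-in for the state constant:

STATE_NORMAL = 0    # stand-in for gpodder.STATE_NORMAL

def check_is_new(episode, downloading=lambda e: False):
    # New = normal state, still flagged as new, and not currently downloading
    return (episode.state == STATE_NORMAL and
            episode.is_new and
            not downloading(episode))

def mark_old(episode):
    episode.is_new = False

def mark_new(episode):
    episode.state = STATE_NORMAL
    episode.is_new = True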
def file_exists(self):
@ -1123,17 +1082,6 @@ class PodcastEpisode(PodcastModelObject):
@property
def basename( self):
return os.path.splitext( os.path.basename( self.url))[0]
@property
def published( self):
"""
Returns published date as YYYYMMDD (or 00000000 if not available)
"""
try:
return datetime.datetime.fromtimestamp(self.pubDate).strftime('%Y%m%d')
except:
log( 'Cannot format pubDate for "%s".', self.title, sender = self)
return '00000000'
@property
def pubtime(self):
@ -1141,9 +1089,9 @@ class PodcastEpisode(PodcastModelObject):
Returns published time as HHMM (or 0000 if not available)
"""
try:
return datetime.datetime.fromtimestamp(self.pubDate).strftime('%H%M')
return datetime.datetime.fromtimestamp(self.published).strftime('%H%M')
except:
log('Cannot format pubDate (time) for "%s".', self.title, sender=self)
log('Cannot format published (time) for "%s".', self.title, sender=self)
return '0000'
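With pubDate renamed to published, the remaining pubtime property formats the integer timestamp as HHMM and falls back to '0000'. Roughly equivalent standalone code; the explicit exception list is a guess, the original simply uses a bare except:

import datetime

def pubtime(published):
    try:
        return datetime.datetime.fromtimestamp(published).strftime('%H%M')
    except (TypeError, ValueError, OverflowError, OSError):
        return '0000'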
def playlist_title(self):
@ -1159,7 +1107,7 @@ class PodcastEpisode(PodcastModelObject):
self.cute_pubdate())
def cute_pubdate(self):
result = util.format_date(self.pubDate)
result = util.format_date(self.published)
if result is None:
return '(%s)' % _('unknown')
else:
@ -1172,7 +1120,7 @@ class PodcastEpisode(PodcastModelObject):
if filename is None:
log('calculate_filesize called, but filename is None!', sender=self)
try:
self.length = os.path.getsize(filename)
self.file_size = os.path.getsize(filename)
except:
log( 'Could not get filesize for %s.', self.url)
@ -1204,12 +1152,12 @@ class PodcastEpisode(PodcastModelObject):
return util.format_time(self.total_time)
def get_filesize_string(self):
return util.format_filesize(self.length)
return util.format_filesize(self.file_size)
filesize_prop = property(fget=get_filesize_string)
def get_played_string( self):
if not self.is_played:
if self.is_new:
return _('Unplayed')
return ''
@ -1217,15 +1165,15 @@ class PodcastEpisode(PodcastModelObject):
played_prop = property(fget=get_played_string)
def is_duplicate(self, episode):
if self.title == episode.title and self.pubDate == episode.pubDate:
if self.title == episode.title and self.published == episode.published:
log('Possible duplicate detected: %s', self.title)
return True
return False
def duplicate_id(self):
return hash((self.title, self.pubDate))
return hash((self.title, self.published))
def update_from(self, episode):
for k in ('title', 'url', 'description', 'link', 'pubDate', 'guid'):
for k in ('title', 'url', 'description', 'link', 'published', 'guid'):
setattr(self, k, getattr(episode, k))

View File

@ -63,15 +63,15 @@ class Matcher(object):
elif k in ('video', 'audio'):
return episode.file_type() == k
elif k == 'torrent':
return episode.url.endswith('.torrent') or 'torrent' in episode.mimetype
return episode.url.endswith('.torrent') or 'torrent' in episode.mime_type
# Nouns (for comparisons)
if k in ('megabytes', 'mb'):
return float(episode.length) / (1024*1024)
return float(episode.file_size) / (1024*1024)
elif k == 'title':
return episode.title
elif k == 'since':
return (datetime.datetime.now() - datetime.datetime.fromtimestamp(episode.pubDate)).days
return (datetime.datetime.now() - datetime.datetime.fromtimestamp(episode.published)).days
elif k in ('minutes', 'min'):
return float(episode.total_time) / 60
elif k in ('remaining', 'rem'):
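The Matcher hunk above resolves query keywords against the renamed episode fields (file_size, published). A hedged sketch of that keyword-to-value mapping; the episode argument is assumed to expose those attributes, and the function name is illustrative:

import datetime

def resolve_keyword(episode, k):
    if k in ('megabytes', 'mb'):
        return float(episode.file_size) / (1024 * 1024)
    elif k == 'title':
        return episode.title
    elif k == 'since':
        delta = datetime.datetime.now() - datetime.datetime.fromtimestamp(episode.published)
        return delta.days
    elif k in ('minutes', 'min'):
        return float(episode.total_time) / 60
    return None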

View File

@ -156,10 +156,10 @@ class SoundcloudUser(object):
'link': track.get('permalink_url', 'http://soundcloud.com/'+self.username),
'description': track.get('description', _('No description available')),
'url': url,
'length': int(filesize),
'mimetype': filetype,
'file_size': int(filesize),
'mime_type': filetype,
'guid': track.get('permalink', track.get('id')),
'pubDate': soundcloud_parsedate(track.get('created_at', None)),
'published': soundcloud_parsedate(track.get('created_at', None)),
}
finally:
self.commit_cache()

View File

@ -150,10 +150,10 @@ class FM4OnDemandPlaylist(object):
'link': '',
'description': '',
'url': url,
'length': int(filesize),
'mimetype': filetype,
'file_size': int(filesize),
'mime_type': filetype,
'guid': url,
'pubDate': filedate,
'published': filedate,
})
episode.save()
tracks.append(episode)
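Both external sources (Soundcloud and the FM4 on-demand playlist) now fill episodes using the renamed column names. A placeholder example of such a dict under the new keys; every value below is made up:

episode_data = {
    'title': 'Example track',
    'link': 'http://example.com/track',
    'description': '',
    'url': 'http://example.com/track.mp3',
    'file_size': 12345678,             # bytes (was "length")
    'mime_type': 'audio/mpeg',         # (was "mimetype")
    'guid': 'http://example.com/track.mp3',
    'published': 1292851200,           # Unix timestamp (was "pubDate")
}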