2005-11-21 19:21:25 +01:00
|
|
|
|
|
|
|
#
|
2006-04-07 22:22:30 +02:00
|
|
|
# gPodder (a media aggregator / podcast client)
|
2006-12-29 16:52:52 +01:00
|
|
|
# Copyright (C) 2005-2007 Thomas Perl <thp at perli.net>
|
2005-11-21 19:21:25 +01:00
|
|
|
#
|
2006-04-07 22:22:30 +02:00
|
|
|
# This program is free software; you can redistribute it and/or
|
|
|
|
# modify it under the terms of the GNU General Public License
|
|
|
|
# as published by the Free Software Foundation; either version 2
|
|
|
|
# of the License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program; if not, write to the Free Software
|
|
|
|
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
|
|
|
|
# MA 02110-1301, USA.
|
|
|
|
#
|
|
|
|
|
2005-11-21 19:21:25 +01:00
|
|
|
|
|
|
|
#
|
|
|
|
# libpodcasts.py -- data classes for gpodder
|
|
|
|
# thomas perl <thp@perli.net> 20051029
|
|
|
|
#
|
|
|
|
#
|
|
|
|
|
|
|
|
import gtk
|
|
|
|
import gobject
|
2006-03-19 18:44:47 +01:00
|
|
|
import htmlentitydefs
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-11-17 15:26:10 +01:00
|
|
|
from liblogger import log
|
2006-02-04 11:37:23 +01:00
|
|
|
import libgpodder
|
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
from os.path import exists
|
2006-03-29 14:41:34 +02:00
|
|
|
from os.path import basename
|
2006-04-04 00:27:57 +02:00
|
|
|
from os.path import splitext
|
2006-12-06 21:25:26 +01:00
|
|
|
import os.path
|
|
|
|
import os
|
|
|
|
import glob
|
|
|
|
import shutil
|
2006-03-03 21:04:25 +01:00
|
|
|
|
2006-04-08 09:22:30 +02:00
|
|
|
from types import ListType
|
2006-04-08 16:40:19 +02:00
|
|
|
from datetime import datetime
|
2006-04-10 18:46:50 +02:00
|
|
|
from time import time
|
2006-04-08 09:22:30 +02:00
|
|
|
|
2006-02-04 11:37:23 +01:00
|
|
|
from liblocdbwriter import writeLocalDB
|
|
|
|
from liblocdbreader import readLocalDB
|
|
|
|
|
2007-03-15 22:33:23 +01:00
|
|
|
from libtagupdate import update_metadata_on_file
|
2007-03-17 13:11:13 +01:00
|
|
|
from libtagupdate import tagging_supported
|
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
from threading import Event
|
|
|
|
from libwget import downloadThread
|
2006-02-26 00:13:29 +01:00
|
|
|
import re
|
|
|
|
|
2006-04-10 18:46:50 +02:00
|
|
|
from email.Utils import mktime_tz
|
|
|
|
from email.Utils import parsedate_tz
|
|
|
|
|
2006-04-04 00:27:57 +02:00
|
|
|
import md5
|
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2006-04-08 09:22:30 +02:00
|
|
|
class podcastChannel(ListType):
    """holds data for a complete channel

    A podcastChannel subclasses list: it IS the list of episodes parsed
    from the feed, plus channel-level metadata (title, link, description,
    image, ...) and helpers for gPodder's on-disk cache and local index
    ("localdb") files.
    """

    def __init__( self, url = "", title = "", link = "", description = ""):
        # Feed (RSS) URL; also the basis for this channel's MD5 filename.
        self.url = url
        # NOTE: goes through the title property setter (strips whitespace).
        self.title = title
        self.link = link
        self.description = stripHtml( description)
        self.image = None
        self.pubDate = datetime.now().ctime()
        self.language = ''
        self.copyright = ''
        self.webMaster = ''
        self.downloaded = None
        # should this channel be synced to devices? (ex: iPod)
        self.sync_to_devices = True
        # if this is set to true, device syncing (ex: iPod) should treat this as music, not as podcast)
        self.is_music_channel = False
        # to which playlist should be synced when "is_music_channel" is true?
        self.device_playlist_name = 'gPodder'
        # if set, this overrides the channel-provided title
        self.override_title = ''

    def get_filename( self):
        """Return the MD5 sum of the channel URL"""
        return md5.new( self.url).hexdigest()

    filename = property(fget=get_filename)

    def get_title( self):
        # Precedence: user override, then feed-provided title, then the
        # URL itself (so an unnamed channel is still identifiable).
        if self.override_title:
            return self.override_title
        elif not self.__title.strip():
            return self.url
        else:
            return self.__title

    def set_title( self, value):
        # Strip once on assignment so getters never see stray whitespace.
        self.__title = value.strip()

    title = property(fget=get_title,
                     fset=set_title)

    def set_custom_title( self, custom_title):
        """Set a user-defined title.

        Passing the feed's own title clears the override so the channel
        follows the feed again.
        """
        custom_title = custom_title.strip()

        if custom_title != self.__title:
            self.override_title = custom_title
        else:
            self.override_title = ''

    def get_localdb_channel( self):
        # Load the channel's local database (downloaded episodes) from
        # the index file; on any error fall back to an empty copy of
        # this channel's metadata.
        try:
            locdb_reader = readLocalDB( self.url)
            locdb_reader.parseXML( self.index_file)
            return locdb_reader.channel
        except:
            return podcastChannel( self.url, self.title, self.link, self.description)

    def set_localdb_channel( self, channel):
        # Persist the given channel object as this channel's local index.
        if channel != None:
            try:
                log( 'Setting localdb channel data')
                writeLocalDB( self.index_file, channel)
            except:
                log( 'Cannot save channel in set_localdb_channel( %s)', channel.title)

    localdb_channel = property(fget=get_localdb_channel,
                               fset=set_localdb_channel)

    def set_metadata_from_localdb( self):
        """Copy sync/title metadata from the on-disk index into self."""
        log( 'Reading metadata from %s', self.index_file)
        libgpodder.getLock()
        self.copy_metadata_from( self.localdb_channel)
        libgpodder.releaseLock()

    def save_metadata_to_localdb( self):
        """Write self's sync/title metadata back to the on-disk index."""
        log( 'Saving metadata to %s', self.index_file)
        libgpodder.getLock()
        ch = self.localdb_channel
        ch.copy_metadata_from( self)
        self.localdb_channel = ch
        libgpodder.releaseLock()

    def copy_metadata_from( self, ch):
        # copy all metadata fields
        self.sync_to_devices = ch.sync_to_devices
        self.is_music_channel = ch.is_music_channel
        self.device_playlist_name = ch.device_playlist_name
        self.override_title = ch.override_title

    def newest_pubdate_downloaded( self):
        """Return the pubDate of the most recently downloaded episode."""
        gl = libgpodder.gPodderLib()

        # Try DownloadHistory's entries first
        for episode in self:
            if gl.history_is_downloaded( episode.url):
                return episode.pubDate

        # If nothing found, do pubDate comparison
        pubdate = None
        for episode in self.localdb_channel:
            pubdate = episode.newer_pubdate( pubdate)
        return pubdate

    def can_sort_by_pubdate( self):
        # Sorting is only offered when EVERY episode has a parseable date.
        for episode in self:
            try:
                mktime_tz(parsedate_tz( episode.pubDate))
            except:
                log('Episode %s has non-parseable pubDate. Sorting disabled.', episode.title)
                return False
        # NOTE(review): dead assignment (never read) — left over from an
        # earlier version; safe to remove in a later cleanup.
        can_sort = False

        return True

    def addDownloadedItem( self, item):
        """Add a freshly-downloaded episode to the local index.

        Returns True when the item was newly added, False when it was
        already present in the index.
        """
        # no multithreaded access
        libgpodder.getLock()
        localdb = self.index_file
        log( 'Local database: %s', localdb)

        self.downloaded = self.localdb_channel

        already_in_list = False
        # try to find the new item in the list
        for it in self.downloaded:
            if it.equals( item):
                already_in_list = True
                break

        # only append if not already in list
        if not already_in_list:
            self.downloaded.append( item)
            writeLocalDB( localdb, self.downloaded)

            # Update metadata on file (if possible and wanted)
            if libgpodder.gPodderLib().update_tags and tagging_supported():
                filename = self.getPodcastFilename( item.url)
                try:
                    update_metadata_on_file( filename, title = item.title, artist = self.title)
                except:
                    log('Error while calling update_metadata_on_file() :(')

        # Record in the download history regardless of duplication.
        libgpodder.gPodderLib().history_mark_downloaded( item.url)

        libgpodder.releaseLock()
        return not already_in_list

    def printChannel( self):
        # Debugging helper: dump channel + episode titles to stdout.
        print '- Channel: "' + self.title + '"'
        for item in self:
            print '-- Item: "' + item.title + '"'

    def is_downloaded( self, item):
        # An episode counts as downloaded when its local file exists.
        return self.podcastFilenameExists( item.url)

    def get_all_episodes( self):
        # Merge feed episodes with locally-known (downloaded) episodes,
        # de-duplicating by URL; feed entries take precedence.
        episodes = []
        added_urls = []

        for item in [] + self + self.localdb_channel:
            if item.url and item.url not in added_urls:
                episodes.append( item)
                added_urls.append( item.url)

        return episodes

    def items_liststore( self, want_color = True, downloading_callback = None):
        """Return a gtk.ListStore containing episodes for this channel

        If want_color is True (the default), this will set special colors
        for already downloaded episodes and download-in-progress episodes.

        If downloading_callback is set, this should be a function that takes
        the URL of the episodes and returns True if the episode is currently
        being downloaded and False otherwise.
        """
        # Columns: url, title, size string, (always-True bool), background
        # color, cute pubdate, one-line description.
        new_model = gtk.ListStore( gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
        gl = libgpodder.gPodderLib()

        for item in self.get_all_episodes():
            if self.is_downloaded( item) and want_color:
                background_color = gl.colors['downloaded']
            elif downloading_callback and downloading_callback( item.url) and want_color:
                background_color = gl.colors['downloading']
            elif libgpodder.gPodderLib().history_is_downloaded( item.url) and want_color:
                # in history but file gone -> previously deleted
                background_color = gl.colors['deleted']
            else:
                background_color = gl.colors['default']
            new_iter = new_model.append()
            new_model.set( new_iter, 0, item.url)
            new_model.set( new_iter, 1, item.title)
            new_model.set( new_iter, 2, item.getSize())
            new_model.set( new_iter, 3, True)
            new_model.set( new_iter, 4, background_color)
            new_model.set( new_iter, 5, item.cute_pubdate())
            new_model.set( new_iter, 6, item.one_line_description())

        return new_model

    def find_episode( self, url):
        # Linear search over merged feed + localdb episodes.
        for item in self.get_all_episodes():
            if url == item.url:
                return item

        return None

    def downloadRss( self, force_update = True, callback_error = None, callback_is_cancelled = None):
        """Fetch the feed into the cache file and return its path.

        Returns the cache file path on success (or when cancelled before
        starting), the restored backup's path when the download failed
        but a backup could be restored, or None otherwise.
        """
        if callback_is_cancelled:
            if callback_is_cancelled() == True:
                return self.cache_file

        if not exists( self.cache_file) or force_update:
            # remove old cache file (keeps a .bak for restore below)
            self.remove_cache_file()
            event = Event()
            download_thread = downloadThread( self.url, self.cache_file, event)
            download_thread.download()

            # Poll for completion, honouring cancellation requests.
            while not event.isSet():
                if callback_is_cancelled:
                    if callback_is_cancelled() == True:
                        download_thread.cancel()
                        self.restore_cache_file()
                event.wait( 0.2)

            # check if download was a success
            if not exists( self.cache_file):
                log('(downloadRss) Download failed! Trying to restore cache file..')
                restored = self.restore_cache_file()
                if callback_error:
                    if restored:
                        callback_error( _('Error downloading %s. Using cached file instead.') % ( self.url, ))
                    else:
                        callback_error( _('Error downloading %s.') % ( self.url, ))
                return restored

        return self.cache_file

    def get_save_dir(self):
        # Per-channel download directory, keyed by the channel's MD5 name.
        save_dir = os.path.join( libgpodder.gPodderLib().downloaddir, self.filename ) + '/'

        # Create save_dir if it does not yet exist
        if libgpodder.gPodderLib().createIfNecessary( save_dir) == False:
            log( '(libpodcasts) Could not create: %s', save_dir)

        return save_dir

    save_dir = property(fget=get_save_dir)

    def get_cache_file(self):
        # Cached copy of the downloaded feed XML.
        return libgpodder.gPodderLib().cachedir + self.filename + '.xml'

    cache_file = property(fget=get_cache_file)

    def get_cache_backup_file( self):
        # Backup of the cache file, written by remove_cache_file().
        return libgpodder.gPodderLib().cachedir + self.filename + '.bak'

    cache_backup_file = property(fget=get_cache_backup_file)

    def remove_cache_file( self):
        # Keep a backup copy so restore_cache_file() can undo this.
        if exists( self.cache_file):
            shutil.copyfile( self.cache_file, self.cache_backup_file)

        libgpodder.gPodderLib().deleteFilename( self.cache_file)

    def restore_cache_file( self):
        """Restore the feed cache from its backup; return its path or None."""
        if exists( self.cache_backup_file):
            shutil.copyfile( self.cache_backup_file, self.cache_file)
            log('Successfully restored cache file from old backup :)')
            return self.cache_file

        log('Could not restore cache file, sorry..')
        return None

    def remove_downloaded( self):
        # Recursively delete this channel's download dir (best-effort:
        # ignore_errors=True).
        shutil.rmtree( self.save_dir, True)

    def get_index_file(self):
        # gets index xml filename for downloaded channels list
        return os.path.join( self.save_dir, 'index.xml')

    index_file = property(fget=get_index_file)

    def get_cover_file( self):
        # gets cover filename for cover download cache
        return os.path.join( self.save_dir, 'cover')

    cover_file = property(fget=get_cover_file)

    def getPodcastFilename( self, url):
        """Return the local download path for an episode URL.

        Only the extension is taken from the URL; the basename is the
        MD5 of the full URL to avoid collisions and bad characters.
        """
        # strip question mark (and everything behind it), fix %20 errors
        filename = basename( url).replace( '%20', ' ')
        indexOfQuestionMark = filename.rfind( '?')
        if indexOfQuestionMark != -1:
            filename = filename[:indexOfQuestionMark]
        # end strip questionmark
        extension = splitext( filename)[1].lower()

        return self.save_dir + md5.new(url).hexdigest() + extension

    def podcastFilenameExists( self, url):
        return exists( self.getPodcastFilename( url))

    def delete_episode_by_url(self, url):
        """Remove an episode from the local index and delete its file."""
        log( 'Delete %s', url)
        # no multithreaded access
        libgpodder.getLock()

        new_localdb = self.localdb_channel

        # NOTE(review): removing from the list while iterating it can skip
        # adjacent matches; with unique episode URLs only one entry ever
        # matches, but confirm before relying on duplicate removal here.
        for item in new_localdb:
            if item.url == url:
                new_localdb.remove(item)

        self.localdb_channel = new_localdb

        # clean-up downloaded file
        if self.podcastFilenameExists( url):
            episode_filename = self.getPodcastFilename( url)
            libgpodder.gPodderLib().deleteFilename( episode_filename)

        libgpodder.releaseLock()
2006-03-03 21:04:25 +01:00
|
|
|
class podcastItem(object):
    """holds data for one object in a channel

    Represents a single episode: the enclosure URL, title, size
    (``length``, kept as a string as read from the feed), mimetype,
    guid, plus description/link/pubDate metadata.
    """

    def __init__( self,
                  url = "",
                  title = "",
                  length = "0",
                  mimetype = "",
                  guid = "",
                  description = "",
                  link = "",
                  pubDate = None):
        self.url = url
        # goes through the title property setter (strips whitespace)
        self.title = title
        self.length = length
        self.mimetype = mimetype
        self.guid = guid
        self.description = stripHtml( description)
        # FIX: the link parameter was previously ignored (always set "")
        self.link = link
        self.pubDate = pubDate
        if pubDate is None:
            # fall back to "now" so sorting/display always has a date
            self.pubDate = datetime.now().ctime()

    def one_line_description( self):
        """Return the first non-empty line of the description, truncated
        to at most 83 characters (80 + ellipsis)."""
        lines = self.description.strip().splitlines()
        if not lines or lines[0] == '':
            return _('No description available')
        else:
            desc = lines[0].strip()
            if len( desc) > 84:
                return desc[:80] + '...'
            else:
                return desc

    def __cmp__( self, other):
        # Order episodes by publication timestamp (Python 2 semantics).
        try:
            timestamp_self = int(mktime_tz( parsedate_tz( self.pubDate)))
            timestamp_other = int(mktime_tz( parsedate_tz( other.pubDate)))
        except:
            # by default, do as if this is not the same
            # this is here so that comparisons with None
            # can be allowed (item != None -> True)
            return -1

        return timestamp_self - timestamp_other

    def compare_pubdate( self, pubdate):
        """Compare own pubDate with *pubdate*; <0, 0 or >0 like cmp().

        An unparseable own date sorts as older (-1); an unparseable
        argument sorts as older than self (+1).
        """
        try:
            timestamp_self = int(mktime_tz( parsedate_tz( self.pubDate)))
        except:
            return -1

        try:
            timestamp_other = int(mktime_tz( parsedate_tz( pubdate)))
        except:
            return 1

        return timestamp_self - timestamp_other

    def newer_pubdate( self, pubdate = None):
        """Return whichever is newer: self.pubDate or *pubdate*."""
        if self.compare_pubdate( pubdate) > 0:
            return self.pubDate
        else:
            return pubdate

    def cute_pubdate( self):
        """Return a human-friendly pubDate: 'Today', 'Yesterday', a
        weekday name for the last week, else a locale date string."""
        seconds_in_a_day = 86400
        try:
            timestamp = int(mktime_tz( parsedate_tz( self.pubDate)))
        except:
            return _("(unknown)")
        # Compare calendar-day buckets, not raw 24h deltas (+1s nudge
        # guards against boundary rounding).
        diff = int((time()+1)/seconds_in_a_day) - int(timestamp/seconds_in_a_day)

        if diff == 0:
            return _("Today")
        if diff == 1:
            return _("Yesterday")
        if diff < 7:
            return str(datetime.fromtimestamp( timestamp).strftime( "%A"))

        return str(datetime.fromtimestamp( timestamp).strftime( "%x"))

    def calculate_filesize( self, channel):
        """Update self.length from the downloaded file's size on disk."""
        try:
            self.length = str(os.path.getsize( channel.getPodcastFilename( self.url)))
        except:
            log( 'Could not get filesize for %s.', self.url)

    def equals( self, other_item):
        """Two episodes are considered equal when their URLs match."""
        if other_item is None:
            return False

        return self.url == other_item.url

    def get_title( self):
        return self.__title

    def set_title( self, value):
        # strip once on assignment so getters never see stray whitespace
        self.__title = value.strip()

    title = property(fget=get_title,
                     fset=set_title)

    def getSize( self):
        """Return the size as a human-readable string, '-' if unknown."""
        try:
            size = int( self.length)
        except ValueError:
            return '-'

        return libgpodder.gPodderLib().size_to_string( size)
|
|
|
class opmlChannel(object):
    """Lightweight descriptor for a channel discovered via OPML.

    Only carries the feed location and a display name; it is not a
    full podcastChannel.
    """

    def __init__( self, xmlurl, title = 'Unknown OPML Channel'):
        # Store the display name and the feed's XML URL.
        self.title = title
        self.xmlurl = xmlurl
|
|
|
|
2006-12-08 21:58:30 +01:00
|
|
|
class DownloadHistory( ListType):
    """A list of episode URLs that are known to have been downloaded.

    The list is mirrored to a plain-text file (one URL per line) so
    the history survives application restarts.
    """

    def __init__( self, filename):
        self.filename = filename
        try:
            self.read_from_file()
        except:
            # No readable history file yet -> start with an empty list.
            log( '(DownloadHistory) Creating new history list.')

    def read_from_file( self):
        """Load one whitespace-stripped URL per line from disk."""
        for entry in open( self.filename, 'r'):
            self.append( entry.strip())

    def save_to_file( self):
        """Write the history back to disk; does nothing when empty."""
        if not len( self):
            return

        handle = open( self.filename, 'w')
        for url in self:
            handle.write( url + "\n")
        handle.close()
        log( '(DownloadHistory) Wrote %d history entries.', len( self))

    def mark_downloaded( self, data, autosave = True):
        """Record one URL (or a plain list of URLs) as downloaded.

        Returns the number of entries actually added; persists to disk
        when something changed and autosave is enabled.
        """
        affected = 0
        if data and type( data) is ListType:
            # Support passing a list of urls to this function
            # (note: exact-type check on purpose, so a DownloadHistory
            # argument is treated as a single entry, as before).
            for url in data:
                affected += self.mark_downloaded( url, autosave = False)
        elif data not in self:
            log( '(DownloadHistory) Marking as downloaded: %s', data)
            self.append( data)
            affected += 1

        if affected and autosave:
            self.save_to_file()

        return affected
|
|
|
|
2005-11-21 19:21:25 +01:00
|
|
|
def channelsToModel( channels):
    """Build a two-column (url, title) gtk.ListStore from *channels*."""
    model = gtk.ListStore( gobject.TYPE_STRING, gobject.TYPE_STRING)

    # One row per channel: column 0 = feed URL, column 1 = display title.
    for channel in channels:
        row = model.append()
        model.set( row, 0, channel.url)
        model.set( row, 1, channel.title)

    return model
2006-02-26 00:13:29 +01:00
|
|
|
# Pre-compiled pattern matching any markup tag; compiled once at import
# time instead of on every stripHtml() call.
_STRIP_HTML_RE = re.compile( "<[^>]*>")


def stripHtml( html):
    """Strip markup tags and resolve HTML entities in a string.

    Used as a fix for <description> tags that contain embedded HTML.
    Numeric character references (&#NNN;) are first rewritten to their
    named-entity form, then all named entities are replaced by their
    ISO-8859-1 characters.
    """
    stripstr = _STRIP_HTML_RE.sub( "", html)

    # replaces numeric entities with entity names
    # (renamed locals: the originals shadowed the builtin 'dict')
    codepoint_names = htmlentitydefs.codepoint2name
    for codepoint in codepoint_names.keys():
        stripstr = stripstr.replace( '&#'+str(codepoint)+';', '&'+unicode( codepoint_names[codepoint], 'iso-8859-1')+';')

    # strips html entities
    named_entities = htmlentitydefs.entitydefs
    for name in named_entities.keys():
        stripstr = stripstr.replace( '&'+unicode(name,'iso-8859-1')+';', unicode(named_entities[name], 'iso-8859-1'))

    return stripstr
2006-12-08 21:58:30 +01:00
|
|
|
|