2005-11-21 19:21:25 +01:00
|
|
|
|
|
|
|
#
|
2006-04-07 22:22:30 +02:00
|
|
|
# gPodder (a media aggregator / podcast client)
|
2006-12-29 16:52:52 +01:00
|
|
|
# Copyright (C) 2005-2007 Thomas Perl <thp at perli.net>
|
2005-11-21 19:21:25 +01:00
|
|
|
#
|
2006-04-07 22:22:30 +02:00
|
|
|
# This program is free software; you can redistribute it and/or
|
|
|
|
# modify it under the terms of the GNU General Public License
|
|
|
|
# as published by the Free Software Foundation; either version 2
|
|
|
|
# of the License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program; if not, write to the Free Software
|
|
|
|
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
|
|
|
|
# MA 02110-1301, USA.
|
|
|
|
#
|
|
|
|
|
2005-11-21 19:21:25 +01:00
|
|
|
|
|
|
|
#
|
|
|
|
# libpodcasts.py -- data classes for gpodder
|
|
|
|
# thomas perl <thp@perli.net> 20051029
|
|
|
|
#
|
|
|
|
#
|
|
|
|
|
|
|
|
import gtk
|
|
|
|
import gobject
|
2007-07-05 23:07:16 +02:00
|
|
|
import pango
|
2007-08-07 20:11:31 +02:00
|
|
|
|
|
|
|
from gpodder import util
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-11-17 15:26:10 +01:00
|
|
|
from liblogger import log
|
2006-02-04 11:37:23 +01:00
|
|
|
import libgpodder
|
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
from os.path import exists
|
2006-03-29 14:41:34 +02:00
|
|
|
from os.path import basename
|
2006-04-04 00:27:57 +02:00
|
|
|
from os.path import splitext
|
2006-12-06 21:25:26 +01:00
|
|
|
import os.path
|
|
|
|
import os
|
|
|
|
import glob
|
|
|
|
import shutil
|
2007-08-19 09:23:02 +02:00
|
|
|
import sys
|
2007-06-06 13:56:07 +02:00
|
|
|
from urllib import unquote
|
2006-03-03 21:04:25 +01:00
|
|
|
|
2006-04-08 09:22:30 +02:00
|
|
|
from types import ListType
|
2006-04-08 16:40:19 +02:00
|
|
|
from datetime import datetime
|
2006-04-10 18:46:50 +02:00
|
|
|
from time import time
|
2006-04-08 09:22:30 +02:00
|
|
|
|
2006-02-04 11:37:23 +01:00
|
|
|
from liblocdbwriter import writeLocalDB
|
|
|
|
from liblocdbreader import readLocalDB
|
|
|
|
|
2007-03-15 22:33:23 +01:00
|
|
|
from libtagupdate import update_metadata_on_file
|
2007-03-17 13:11:13 +01:00
|
|
|
from libtagupdate import tagging_supported
|
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
from threading import Event
|
|
|
|
from libwget import downloadThread
|
2006-02-26 00:13:29 +01:00
|
|
|
import re
|
|
|
|
|
2006-04-10 18:46:50 +02:00
|
|
|
from email.Utils import mktime_tz
|
|
|
|
from email.Utils import parsedate_tz
|
|
|
|
|
2007-07-05 23:07:16 +02:00
|
|
|
from xml.sax import saxutils
|
|
|
|
|
2006-04-04 00:27:57 +02:00
|
|
|
import md5
|
|
|
|
|
2007-07-19 14:44:12 +02:00
|
|
|
import string
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2006-04-08 09:22:30 +02:00
|
|
|
class podcastChannel(ListType):
|
2006-03-03 21:04:25 +01:00
|
|
|
"""holds data for a complete channel"""
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2005-11-21 19:21:25 +01:00
|
|
|
def __init__( self, url = "", title = "", link = "", description = ""):
|
|
|
|
self.url = url
|
|
|
|
self.title = title
|
|
|
|
self.link = link
|
2007-08-07 20:11:31 +02:00
|
|
|
self.description = util.remove_html_tags( description)
|
2006-03-03 21:04:25 +01:00
|
|
|
self.image = None
|
2006-04-08 16:40:19 +02:00
|
|
|
self.pubDate = datetime.now().ctime()
|
|
|
|
self.language = ''
|
|
|
|
self.copyright = ''
|
|
|
|
self.webMaster = ''
|
2006-03-03 21:04:25 +01:00
|
|
|
self.downloaded = None
|
2006-04-07 03:43:06 +02:00
|
|
|
# should this channel be synced to devices? (ex: iPod)
|
|
|
|
self.sync_to_devices = True
|
2006-04-08 11:09:15 +02:00
|
|
|
# if this is set to true, device syncing (ex: iPod) should treat this as music, not as podcast)
|
|
|
|
self.is_music_channel = False
|
|
|
|
# to which playlist should be synced when "is_music_channel" is true?
|
|
|
|
self.device_playlist_name = 'gPodder'
|
2007-03-08 13:11:10 +01:00
|
|
|
# if set, this overrides the channel-provided title
|
|
|
|
self.override_title = ''
|
2007-07-19 14:44:12 +02:00
|
|
|
self.username = ''
|
|
|
|
self.password = ''
|
|
|
|
# mapping table for maketrans
|
|
|
|
self.map_from = 'abcdefghijklmnopqrstuvwxyz0123456789'
|
|
|
|
self.map_to = 'qazwsxedcrfvtgbyhnujmikolp9514738062'
|
2006-03-03 21:04:25 +01:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
def get_filename( self):
|
|
|
|
"""Return the MD5 sum of the channel URL"""
|
|
|
|
return md5.new( self.url).hexdigest()
|
|
|
|
|
|
|
|
filename = property(fget=get_filename)
|
2006-08-02 20:24:48 +02:00
|
|
|
|
|
|
|
def get_title( self):
|
2007-03-08 13:11:10 +01:00
|
|
|
if self.override_title:
|
|
|
|
return self.override_title
|
|
|
|
elif not self.__title.strip():
|
|
|
|
return self.url
|
|
|
|
else:
|
|
|
|
return self.__title
|
2006-08-02 20:24:48 +02:00
|
|
|
|
|
|
|
def set_title( self, value):
|
|
|
|
self.__title = value.strip()
|
|
|
|
|
|
|
|
title = property(fget=get_title,
|
|
|
|
fset=set_title)
|
2007-03-08 13:11:10 +01:00
|
|
|
|
|
|
|
def set_custom_title( self, custom_title):
|
|
|
|
custom_title = custom_title.strip()
|
|
|
|
|
|
|
|
if custom_title != self.__title:
|
|
|
|
self.override_title = custom_title
|
|
|
|
else:
|
|
|
|
self.override_title = ''
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-04-07 03:43:06 +02:00
|
|
|
def get_localdb_channel( self):
|
|
|
|
try:
|
2006-12-06 21:25:26 +01:00
|
|
|
locdb_reader = readLocalDB( self.url)
|
2006-04-09 14:12:19 +02:00
|
|
|
locdb_reader.parseXML( self.index_file)
|
2006-04-07 03:43:06 +02:00
|
|
|
return locdb_reader.channel
|
|
|
|
except:
|
2006-04-08 11:09:15 +02:00
|
|
|
return podcastChannel( self.url, self.title, self.link, self.description)
|
2006-04-07 03:43:06 +02:00
|
|
|
|
|
|
|
def set_localdb_channel( self, channel):
|
|
|
|
if channel != None:
|
|
|
|
try:
|
2006-12-06 21:25:26 +01:00
|
|
|
log( 'Setting localdb channel data')
|
2006-04-07 03:43:06 +02:00
|
|
|
writeLocalDB( self.index_file, channel)
|
|
|
|
except:
|
2006-11-17 15:26:10 +01:00
|
|
|
log( 'Cannot save channel in set_localdb_channel( %s)', channel.title)
|
2006-12-06 21:25:26 +01:00
|
|
|
|
|
|
|
localdb_channel = property(fget=get_localdb_channel,
|
|
|
|
fset=set_localdb_channel)
|
2006-04-07 03:43:06 +02:00
|
|
|
|
|
|
|
def set_metadata_from_localdb( self):
|
2007-03-17 13:11:13 +01:00
|
|
|
log( 'Reading metadata from %s', self.index_file)
|
2006-04-07 03:43:06 +02:00
|
|
|
libgpodder.getLock()
|
2006-12-06 21:25:26 +01:00
|
|
|
self.copy_metadata_from( self.localdb_channel)
|
2006-04-07 03:43:06 +02:00
|
|
|
libgpodder.releaseLock()
|
|
|
|
|
|
|
|
def save_metadata_to_localdb( self):
|
2007-03-17 13:11:13 +01:00
|
|
|
log( 'Saving metadata to %s', self.index_file)
|
2006-04-07 03:43:06 +02:00
|
|
|
libgpodder.getLock()
|
2006-12-06 21:25:26 +01:00
|
|
|
ch = self.localdb_channel
|
|
|
|
ch.copy_metadata_from( self)
|
|
|
|
self.localdb_channel = ch
|
2006-04-07 03:43:06 +02:00
|
|
|
libgpodder.releaseLock()
|
|
|
|
|
|
|
|
def copy_metadata_from( self, ch):
|
|
|
|
# copy all metadata fields
|
|
|
|
self.sync_to_devices = ch.sync_to_devices
|
2006-04-08 11:09:15 +02:00
|
|
|
self.is_music_channel = ch.is_music_channel
|
|
|
|
self.device_playlist_name = ch.device_playlist_name
|
2007-03-08 13:11:10 +01:00
|
|
|
self.override_title = ch.override_title
|
2007-07-19 14:44:12 +02:00
|
|
|
self.username = ch.username
|
|
|
|
self.password = ch.password
|
2006-12-06 21:25:26 +01:00
|
|
|
|
|
|
|
def newest_pubdate_downloaded( self):
|
2006-12-08 21:58:30 +01:00
|
|
|
gl = libgpodder.gPodderLib()
|
|
|
|
|
|
|
|
# Try DownloadHistory's entries first
|
|
|
|
for episode in self:
|
2006-12-13 01:28:26 +01:00
|
|
|
if gl.history_is_downloaded( episode.url):
|
|
|
|
return episode.pubDate
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2006-12-08 21:58:30 +01:00
|
|
|
# If nothing found, do pubDate comparison
|
|
|
|
pubdate = None
|
2006-12-06 21:25:26 +01:00
|
|
|
for episode in self.localdb_channel:
|
|
|
|
pubdate = episode.newer_pubdate( pubdate)
|
|
|
|
return pubdate
|
2007-03-12 12:21:33 +01:00
|
|
|
|
2007-07-05 23:07:16 +02:00
|
|
|
def get_new_episodes( self, download_status_manager = None):
|
|
|
|
last_pubdate = self.newest_pubdate_downloaded()
|
|
|
|
gl = libgpodder.gPodderLib()
|
|
|
|
|
|
|
|
if not last_pubdate:
|
|
|
|
return self[0:min(len(self),gl.default_new)]
|
|
|
|
|
|
|
|
new_episodes = []
|
|
|
|
|
|
|
|
for episode in self.get_all_episodes():
|
|
|
|
# episode is older than newest downloaded
|
|
|
|
if episode.compare_pubdate( last_pubdate) < 0:
|
|
|
|
continue
|
|
|
|
|
|
|
|
# episode has been downloaded before
|
|
|
|
if self.is_downloaded( episode) or gl.history_is_downloaded( episode.url):
|
|
|
|
continue
|
|
|
|
|
|
|
|
# download is currently in progress
|
|
|
|
if download_status_manager and download_status_manager.is_download_in_progress( episode.url):
|
|
|
|
continue
|
|
|
|
|
|
|
|
new_episodes.append( episode)
|
|
|
|
|
|
|
|
return new_episodes
|
|
|
|
|
2007-03-12 12:21:33 +01:00
|
|
|
def can_sort_by_pubdate( self):
|
|
|
|
for episode in self:
|
|
|
|
try:
|
|
|
|
mktime_tz(parsedate_tz( episode.pubDate))
|
|
|
|
except:
|
|
|
|
log('Episode %s has non-parseable pubDate. Sorting disabled.', episode.title)
|
|
|
|
return False
|
|
|
|
can_sort = False
|
|
|
|
|
|
|
|
return True
|
2006-04-07 03:43:06 +02:00
|
|
|
|
2006-02-04 11:37:23 +01:00
|
|
|
def addDownloadedItem( self, item):
|
2006-03-24 20:08:59 +01:00
|
|
|
# no multithreaded access
|
2006-03-19 15:21:48 +01:00
|
|
|
libgpodder.getLock()
|
2006-03-04 21:45:01 +01:00
|
|
|
localdb = self.index_file
|
2006-11-17 15:26:10 +01:00
|
|
|
log( 'Local database: %s', localdb)
|
2006-02-04 11:37:23 +01:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
self.downloaded = self.localdb_channel
|
2007-03-15 22:33:23 +01:00
|
|
|
|
2006-04-03 21:43:59 +02:00
|
|
|
already_in_list = False
|
|
|
|
# try to find the new item in the list
|
2006-04-08 09:22:30 +02:00
|
|
|
for it in self.downloaded:
|
2006-04-03 21:43:59 +02:00
|
|
|
if it.equals( item):
|
|
|
|
already_in_list = True
|
|
|
|
break
|
|
|
|
|
|
|
|
# only append if not already in list
|
|
|
|
if not already_in_list:
|
2006-04-08 09:22:30 +02:00
|
|
|
self.downloaded.append( item)
|
2007-03-14 20:35:15 +01:00
|
|
|
writeLocalDB( localdb, self.downloaded)
|
2006-12-08 21:58:30 +01:00
|
|
|
|
2007-03-15 22:33:23 +01:00
|
|
|
# Update metadata on file (if possible and wanted)
|
2007-03-17 13:11:13 +01:00
|
|
|
if libgpodder.gPodderLib().update_tags and tagging_supported():
|
2007-03-15 22:33:23 +01:00
|
|
|
filename = self.getPodcastFilename( item.url)
|
|
|
|
try:
|
2007-04-01 19:53:04 +02:00
|
|
|
update_metadata_on_file( filename, title = item.title, artist = self.title)
|
2007-03-15 22:33:23 +01:00
|
|
|
except:
|
|
|
|
log('Error while calling update_metadata_on_file() :(')
|
|
|
|
|
2006-12-08 21:58:30 +01:00
|
|
|
libgpodder.gPodderLib().history_mark_downloaded( item.url)
|
2006-04-03 21:43:59 +02:00
|
|
|
|
2007-04-09 21:40:36 +02:00
|
|
|
if self.get_file_type( item) == 'torrent':
|
|
|
|
torrent_filename = self.getPodcastFilename( item.url)
|
|
|
|
destination_filename = self.get_torrent_filename( torrent_filename)
|
|
|
|
libgpodder.gPodderLib().invoke_torrent( item.url, torrent_filename, destination_filename)
|
|
|
|
|
2006-03-19 15:21:48 +01:00
|
|
|
libgpodder.releaseLock()
|
2006-04-03 21:43:59 +02:00
|
|
|
return not already_in_list
|
2005-11-21 19:21:25 +01:00
|
|
|
|
|
|
|
def printChannel( self):
|
2006-03-03 21:04:25 +01:00
|
|
|
print '- Channel: "' + self.title + '"'
|
2006-04-08 09:22:30 +02:00
|
|
|
for item in self:
|
2006-03-03 21:04:25 +01:00
|
|
|
print '-- Item: "' + item.title + '"'
|
2005-11-22 14:30:28 +01:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
def is_downloaded( self, item):
|
2006-03-29 14:41:34 +02:00
|
|
|
return self.podcastFilenameExists( item.url)
|
2007-04-23 17:18:31 +02:00
|
|
|
|
|
|
|
def is_played(self, item):
|
|
|
|
return libgpodder.gPodderLib().history_is_played( item.url)
|
2005-11-22 14:30:28 +01:00
|
|
|
|
2006-12-09 01:41:58 +01:00
|
|
|
def get_all_episodes( self):
|
|
|
|
episodes = []
|
|
|
|
added_urls = []
|
2007-04-06 20:10:22 +02:00
|
|
|
added_guids = []
|
2006-12-09 01:41:58 +01:00
|
|
|
|
2007-04-06 20:10:22 +02:00
|
|
|
# go through all episodes (both new and downloaded),
|
|
|
|
# prefer already-downloaded (in localdb)
|
|
|
|
for item in [] + self.localdb_channel + self:
|
|
|
|
# skip items with the same guid (if it has a guid)
|
|
|
|
if item.guid and item.guid in added_guids:
|
|
|
|
continue
|
|
|
|
|
|
|
|
# skip items with the same download url
|
|
|
|
if item.url in added_urls:
|
|
|
|
continue
|
|
|
|
|
|
|
|
episodes.append( item)
|
|
|
|
|
|
|
|
added_urls.append( item.url)
|
|
|
|
if item.guid:
|
|
|
|
added_guids.append( item.guid)
|
|
|
|
|
|
|
|
episodes.sort( reverse = True)
|
2006-12-09 01:41:58 +01:00
|
|
|
|
|
|
|
return episodes
|
|
|
|
|
2007-07-05 23:07:16 +02:00
|
|
|
def items_liststore( self, want_color = True, downloading_callback = None, download_status_manager = None):
|
2006-12-06 21:25:26 +01:00
|
|
|
"""Return a gtk.ListStore containing episodes for this channel
|
|
|
|
|
|
|
|
If want_color is True (the default), this will set special colors
|
|
|
|
for already downloaded episodes and download-in-progress episodes.
|
|
|
|
|
|
|
|
If downloading_callback is set, this should be a function that takes
|
|
|
|
the URL of the episodes and returns True if the episode is currently
|
|
|
|
being downloaded and False otherwise.
|
|
|
|
"""
|
2007-04-03 13:21:12 +02:00
|
|
|
new_model = gtk.ListStore( gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
|
2007-01-28 10:21:39 +01:00
|
|
|
gl = libgpodder.gPodderLib()
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2007-07-05 23:07:16 +02:00
|
|
|
new_episodes = self.get_new_episodes( download_status_manager = download_status_manager)
|
2007-04-03 08:27:46 +02:00
|
|
|
|
2007-04-03 13:21:12 +02:00
|
|
|
index = 1
|
2006-12-09 01:41:58 +01:00
|
|
|
for item in self.get_all_episodes():
|
2007-04-03 13:21:12 +02:00
|
|
|
played_icon = None
|
2006-12-09 01:41:58 +01:00
|
|
|
if self.is_downloaded( item) and want_color:
|
2007-04-03 13:44:45 +02:00
|
|
|
if not libgpodder.gPodderLib().history_is_played( item.url):
|
2007-04-03 13:21:12 +02:00
|
|
|
played_icon = gtk.STOCK_YES
|
|
|
|
|
2007-04-03 08:27:46 +02:00
|
|
|
file_type = self.get_file_type( item)
|
|
|
|
if file_type == 'audio':
|
|
|
|
status_icon = 'audio-x-generic'
|
|
|
|
elif file_type == 'video':
|
|
|
|
status_icon = 'video-x-generic'
|
2007-04-09 21:40:36 +02:00
|
|
|
elif file_type == 'torrent':
|
|
|
|
status_icon = 'applications-internet'
|
2007-04-03 08:27:46 +02:00
|
|
|
else:
|
|
|
|
status_icon = 'unknown'
|
2006-12-09 01:41:58 +01:00
|
|
|
elif downloading_callback and downloading_callback( item.url) and want_color:
|
2007-04-03 08:27:46 +02:00
|
|
|
status_icon = gtk.STOCK_GO_DOWN
|
2006-12-09 01:41:58 +01:00
|
|
|
elif libgpodder.gPodderLib().history_is_downloaded( item.url) and want_color:
|
2007-04-03 08:27:46 +02:00
|
|
|
status_icon = gtk.STOCK_DELETE
|
2007-07-05 23:07:16 +02:00
|
|
|
elif item.url in [ e.url for e in new_episodes ]:
|
2007-04-03 13:21:12 +02:00
|
|
|
status_icon = gtk.STOCK_NEW
|
2006-12-09 01:41:58 +01:00
|
|
|
else:
|
2007-04-03 08:27:46 +02:00
|
|
|
status_icon = None
|
2006-12-09 01:41:58 +01:00
|
|
|
new_iter = new_model.append()
|
|
|
|
new_model.set( new_iter, 0, item.url)
|
|
|
|
new_model.set( new_iter, 1, item.title)
|
|
|
|
new_model.set( new_iter, 2, item.getSize())
|
|
|
|
new_model.set( new_iter, 3, True)
|
2007-04-03 08:27:46 +02:00
|
|
|
new_model.set( new_iter, 4, status_icon)
|
2006-12-09 01:41:58 +01:00
|
|
|
new_model.set( new_iter, 5, item.cute_pubdate())
|
|
|
|
new_model.set( new_iter, 6, item.one_line_description())
|
2007-04-03 08:27:46 +02:00
|
|
|
new_model.set( new_iter, 7, item.description)
|
2007-04-03 13:21:12 +02:00
|
|
|
new_model.set( new_iter, 8, played_icon)
|
|
|
|
index += 1
|
2005-11-21 19:21:25 +01:00
|
|
|
|
|
|
|
return new_model
|
|
|
|
|
2006-12-09 01:41:58 +01:00
|
|
|
def find_episode( self, url):
|
|
|
|
for item in self.get_all_episodes():
|
|
|
|
if url == item.url:
|
|
|
|
return item
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-12-09 01:41:58 +01:00
|
|
|
return None
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2007-03-10 16:57:56 +01:00
|
|
|
def downloadRss( self, force_update = True, callback_error = None, callback_is_cancelled = None):
|
|
|
|
if callback_is_cancelled:
|
|
|
|
if callback_is_cancelled() == True:
|
|
|
|
return self.cache_file
|
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
if not exists( self.cache_file) or force_update:
|
2006-03-29 13:51:25 +02:00
|
|
|
# remove old cache file
|
2007-03-08 11:48:09 +01:00
|
|
|
self.remove_cache_file()
|
2006-03-03 21:04:25 +01:00
|
|
|
event = Event()
|
2007-03-10 16:57:56 +01:00
|
|
|
download_thread = downloadThread( self.url, self.cache_file, event)
|
|
|
|
download_thread.download()
|
2006-03-03 21:04:25 +01:00
|
|
|
|
2007-03-10 16:57:56 +01:00
|
|
|
while not event.isSet():
|
|
|
|
if callback_is_cancelled:
|
|
|
|
if callback_is_cancelled() == True:
|
|
|
|
download_thread.cancel()
|
|
|
|
self.restore_cache_file()
|
2006-03-03 21:04:25 +01:00
|
|
|
event.wait( 0.2)
|
2006-03-29 13:51:25 +02:00
|
|
|
|
|
|
|
# check if download was a success
|
2007-03-08 11:48:09 +01:00
|
|
|
if not exists( self.cache_file):
|
|
|
|
log('(downloadRss) Download failed! Trying to restore cache file..')
|
|
|
|
restored = self.restore_cache_file()
|
|
|
|
if callback_error:
|
|
|
|
if restored:
|
2007-06-06 13:56:07 +02:00
|
|
|
callback_error( _('Error downloading %s. Using cached file instead.') % ( unquote( self.url), ))
|
2007-03-08 11:48:09 +01:00
|
|
|
else:
|
2007-06-06 13:56:07 +02:00
|
|
|
callback_error( _('Error downloading %s.') % ( unquote( self.url), ))
|
2007-03-08 11:48:09 +01:00
|
|
|
return restored
|
2006-03-03 21:04:25 +01:00
|
|
|
|
|
|
|
return self.cache_file
|
|
|
|
|
|
|
|
def get_save_dir(self):
|
2006-12-06 21:25:26 +01:00
|
|
|
save_dir = os.path.join( libgpodder.gPodderLib().downloaddir, self.filename ) + '/'
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
# Create save_dir if it does not yet exist
|
2007-08-07 20:11:31 +02:00
|
|
|
if not util.make_directory( save_dir):
|
2006-12-06 21:25:26 +01:00
|
|
|
log( '(libpodcasts) Could not create: %s', save_dir)
|
2006-04-14 14:56:16 +02:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
return save_dir
|
|
|
|
|
|
|
|
save_dir = property(fget=get_save_dir)
|
2006-03-03 21:04:25 +01:00
|
|
|
|
|
|
|
def get_cache_file(self):
|
2006-12-06 21:25:26 +01:00
|
|
|
return libgpodder.gPodderLib().cachedir + self.filename + '.xml'
|
2006-03-03 21:04:25 +01:00
|
|
|
|
|
|
|
cache_file = property(fget=get_cache_file)
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2007-03-08 11:48:09 +01:00
|
|
|
def get_cache_backup_file( self):
|
|
|
|
return libgpodder.gPodderLib().cachedir + self.filename + '.bak'
|
|
|
|
|
|
|
|
cache_backup_file = property(fget=get_cache_backup_file)
|
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
def remove_cache_file( self):
|
2007-03-08 11:48:09 +01:00
|
|
|
if exists( self.cache_file):
|
|
|
|
shutil.copyfile( self.cache_file, self.cache_backup_file)
|
|
|
|
|
2007-08-07 20:11:31 +02:00
|
|
|
util.delete_file( self.cache_file)
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2007-03-08 11:48:09 +01:00
|
|
|
def restore_cache_file( self):
|
|
|
|
if exists( self.cache_backup_file):
|
|
|
|
shutil.copyfile( self.cache_backup_file, self.cache_file)
|
|
|
|
log('Successfully restored cache file from old backup :)')
|
|
|
|
return self.cache_file
|
|
|
|
|
|
|
|
log('Could not restore cache file, sorry..')
|
|
|
|
return None
|
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
def remove_downloaded( self):
|
|
|
|
shutil.rmtree( self.save_dir, True)
|
2006-03-03 21:04:25 +01:00
|
|
|
|
|
|
|
def get_index_file(self):
|
|
|
|
# gets index xml filename for downloaded channels list
|
2006-12-06 21:25:26 +01:00
|
|
|
return os.path.join( self.save_dir, 'index.xml')
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
index_file = property(fget=get_index_file)
|
2006-03-29 14:41:34 +02:00
|
|
|
|
2006-03-31 18:20:18 +02:00
|
|
|
def get_cover_file( self):
|
|
|
|
# gets cover filename for cover download cache
|
2006-12-06 21:25:26 +01:00
|
|
|
return os.path.join( self.save_dir, 'cover')
|
2006-03-31 18:20:18 +02:00
|
|
|
|
|
|
|
cover_file = property(fget=get_cover_file)
|
2007-04-03 08:27:46 +02:00
|
|
|
|
2007-04-09 21:40:36 +02:00
|
|
|
def get_torrent_filename( self, torrent_file):
|
|
|
|
header = open( torrent_file).readline()
|
|
|
|
try:
|
|
|
|
# A crummy way to see if we really are dealing with a torrent file
|
|
|
|
# using index to find values like name and pieces which hopefully
|
|
|
|
# only show up in torrent files (else raise a ValueError)
|
|
|
|
testvar = header.index("6:pieces")
|
|
|
|
name_length_pos = int(header.index("4:name")) + 6
|
|
|
|
# Find the filename for fun + this will add some extra verification
|
|
|
|
colon_pos = int(header.find(":",name_length_pos))
|
|
|
|
name_length = int(header[name_length_pos:colon_pos]) + 1
|
|
|
|
name = header[(colon_pos + 1):(colon_pos + name_length)]
|
|
|
|
return name
|
|
|
|
except:
|
|
|
|
return None
|
|
|
|
|
2007-04-03 08:27:46 +02:00
|
|
|
def get_file_type( self, item):
|
|
|
|
types = {
|
|
|
|
'audio': [ 'mp3', 'ogg', 'wav', 'wma', 'aac', 'm4a' ],
|
2007-04-09 21:40:36 +02:00
|
|
|
'video': [ 'mp4', 'avi', 'mpg', 'mpeg', 'm4v', 'mov' ],
|
|
|
|
'torrent': [ 'torrent' ]
|
2007-04-03 08:27:46 +02:00
|
|
|
}
|
|
|
|
extension = splitext( self.getPodcastFilename( item.url))[1][1:]
|
|
|
|
|
2007-04-09 21:40:36 +02:00
|
|
|
# Torrent file detection
|
|
|
|
if self.get_torrent_filename( self.getPodcastFilename( item.url)) != None:
|
|
|
|
return 'torrent'
|
|
|
|
|
2007-04-03 08:27:46 +02:00
|
|
|
for type in types:
|
|
|
|
if extension in types[type]:
|
|
|
|
return type
|
2007-04-09 21:40:36 +02:00
|
|
|
|
2007-04-03 08:27:46 +02:00
|
|
|
return 'unknown'
|
2006-03-31 18:20:18 +02:00
|
|
|
|
2006-03-29 14:41:34 +02:00
|
|
|
def getPodcastFilename( self, url):
|
|
|
|
# strip question mark (and everything behind it), fix %20 errors
|
2006-12-06 21:25:26 +01:00
|
|
|
filename = basename( url).replace( '%20', ' ')
|
|
|
|
indexOfQuestionMark = filename.rfind( '?')
|
2006-03-29 14:41:34 +02:00
|
|
|
if indexOfQuestionMark != -1:
|
|
|
|
filename = filename[:indexOfQuestionMark]
|
|
|
|
# end strip questionmark
|
2006-04-04 00:27:57 +02:00
|
|
|
extension = splitext( filename)[1].lower()
|
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
return self.save_dir + md5.new(url).hexdigest() + extension
|
2006-03-29 14:41:34 +02:00
|
|
|
|
|
|
|
def podcastFilenameExists( self, url):
|
|
|
|
return exists( self.getPodcastFilename( url))
|
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
def delete_episode_by_url(self, url):
|
|
|
|
log( 'Delete %s', url)
|
2006-03-24 20:08:59 +01:00
|
|
|
# no multithreaded access
|
|
|
|
libgpodder.getLock()
|
2006-12-06 21:25:26 +01:00
|
|
|
|
|
|
|
new_localdb = self.localdb_channel
|
|
|
|
|
|
|
|
for item in new_localdb:
|
|
|
|
if item.url == url:
|
|
|
|
new_localdb.remove(item)
|
|
|
|
|
|
|
|
self.localdb_channel = new_localdb
|
|
|
|
|
2007-03-14 20:35:15 +01:00
|
|
|
# clean-up downloaded file
|
|
|
|
if self.podcastFilenameExists( url):
|
|
|
|
episode_filename = self.getPodcastFilename( url)
|
2007-08-07 20:11:31 +02:00
|
|
|
util.delete_file( episode_filename)
|
2007-03-14 20:35:15 +01:00
|
|
|
|
2006-03-24 20:08:59 +01:00
|
|
|
libgpodder.releaseLock()
|
|
|
|
|
2007-07-19 14:44:12 +02:00
|
|
|
def obfuscate_password(self, password, unobfuscate = False):
|
|
|
|
if unobfuscate:
|
|
|
|
translation_table = string.maketrans(self.map_to + self.map_to.upper(), self.map_from + self.map_from.upper())
|
|
|
|
else:
|
|
|
|
translation_table = string.maketrans(self.map_from + self.map_from.upper(), self.map_to + self.map_to.upper())
|
|
|
|
try:
|
|
|
|
# For now at least, only ascii passwords will work, non-ascii passwords will be stored in plaintext :-(
|
|
|
|
return string.translate(password.encode('ascii'), translation_table)
|
|
|
|
except:
|
|
|
|
return password
|
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
class podcastItem(object):
    """holds data for one object in a channel"""

    def __init__( self,
                  url = "",
                  title = "",
                  length = "0",
                  mimetype = "",
                  guid = "",
                  description = "",
                  link = "",
                  pubDate = None):
        self.url = url
        self.title = title
        self.length = length
        self.mimetype = mimetype
        self.guid = guid
        self.description = util.remove_html_tags( description)
        # Bugfix: this used to be 'self.link = ""', silently discarding
        # the link value passed in by the feed parser.
        self.link = link
        self.pubDate = pubDate
        if pubDate == None:
            self.pubDate = datetime.now().ctime()

    def one_line_description( self):
        """Return the first non-empty line of the description, at most 83 chars."""
        lines = self.description.strip().splitlines()
        if not lines or lines[0] == '':
            return _('No description available')
        else:
            desc = lines[0].strip()
            if len( desc) > 84:
                return desc[:80] + '...'
            else:
                return desc

    def __cmp__( self, other):
        """Order episodes by parsed pubDate (Python 2 comparison protocol)."""
        try:
            timestamp_self = int(mktime_tz( parsedate_tz( self.pubDate)))
            timestamp_other = int(mktime_tz( parsedate_tz( other.pubDate)))
        except:
            # by default, do as if this is not the same
            # this is here so that comparisons with None
            # can be allowed (item != None -> True)
            return -1

        return timestamp_self - timestamp_other

    def compare_pubdate( self, pubdate):
        """Compare our pubDate with another RFC-2822 date string.

        Returns < 0 when we are older, > 0 when we are newer. An
        unparseable own date counts as older; an unparseable argument
        counts as older than us.
        """
        try:
            timestamp_self = int(mktime_tz( parsedate_tz( self.pubDate)))
        except:
            return -1

        try:
            timestamp_other = int(mktime_tz( parsedate_tz( pubdate)))
        except:
            return 1

        return timestamp_self - timestamp_other

    def newer_pubdate( self, pubdate = None):
        """Return the newer of our pubDate and the given date string."""
        if self.compare_pubdate( pubdate) > 0:
            return self.pubDate
        else:
            return pubdate

    def cute_pubdate( self):
        """Return a human-friendly date: Today/Yesterday/weekday/short date."""
        seconds_in_a_day = 86400
        try:
            timestamp = int(mktime_tz( parsedate_tz( self.pubDate)))
        except:
            return _("(unknown)")
        diff = int((time()+1)/seconds_in_a_day) - int(timestamp/seconds_in_a_day)

        if diff == 0:
            return _("Today")
        if diff == 1:
            return _("Yesterday")
        if diff < 7:
            return str(datetime.fromtimestamp( timestamp).strftime( "%A"))

        return str(datetime.fromtimestamp( timestamp).strftime( "%x"))

    def calculate_filesize( self, channel):
        """Set self.length from the downloaded file's size, when available."""
        try:
            self.length = str(os.path.getsize( channel.getPodcastFilename( self.url)))
        except Exception:
            log( 'Could not get filesize for %s.', self.url)

    def equals( self, other_item):
        """Two episodes are considered equal when their download URLs match."""
        if other_item == None:
            return False

        return self.url == other_item.url

    def get_title( self):
        return self.__title

    def set_title( self, value):
        # titles are always stored stripped of surrounding whitespace
        self.__title = value.strip()

    title = property(fget=get_title,
                     fset=set_title)

    def getSize( self):
        """Return the episode size as a human-readable string ('-' if unknown)."""
        try:
            size = int( self.length)
        except ValueError:
            return '-'

        return util.format_filesize( size)
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-06-13 23:00:31 +02:00
|
|
|
|
|
|
|
class opmlChannel(object):
    """Lightweight record for one channel entry read from an OPML file."""

    def __init__( self, xmlurl, title = 'Unknown OPML Channel', description = ''):
        self.title = title
        self.xmlurl = xmlurl
        self.description = description
|
2006-06-13 23:00:31 +02:00
|
|
|
|
|
|
|
|
2006-12-08 21:58:30 +01:00
|
|
|
class DownloadHistory( ListType):
    """Persistent list of episode URLs, stored one per line in a text file."""

    def __init__( self, filename):
        self.filename = filename
        try:
            self.read_from_file()
        except (IOError, OSError):
            # narrowed from a bare "except:"; a missing/unreadable file is
            # the expected first-run case
            log( 'Creating new history list.', sender = self)

    def read_from_file( self):
        """Load one stripped URL per line from self.filename."""
        fp = open( self.filename, 'r')
        try:
            for line in fp:
                self.append( line.strip())
        finally:
            # close the handle even if a read fails
            fp.close()

    def save_to_file( self):
        """Write all entries back to self.filename (no-op when empty)."""
        if len( self):
            fp = open( self.filename, 'w')
            try:
                for url in self:
                    fp.write( url + "\n")
            finally:
                fp.close()
            log( 'Wrote %d history entries.', len( self), sender = self)

    def add_item( self, data, autosave = True):
        """Add a URL (or a list of URLs); returns the number of new entries."""
        affected = 0
        if data and type( data) is ListType:
            # Support passing a list of urls to this function
            for url in data:
                affected = affected + self.add_item( url, autosave = False)
        else:
            if data not in self:
                log( 'Adding: %s', data, sender = self)
                self.append( data)
                affected = affected + 1

        if affected and autosave:
            self.save_to_file()

        return affected
|
|
|
|
|
|
|
|
|
2007-04-03 13:21:12 +02:00
|
|
|
class PlaybackHistory( DownloadHistory):
    """History of played episode URLs; same storage format as DownloadHistory."""
    pass
|
|
|
|
|
|
|
|
|
2007-07-05 23:07:16 +02:00
|
|
|
def channelsToModel( channels, download_status_manager = None):
    """Build the gtk.ListStore backing the channel selection list.

    Columns: url, title, episode count, new-episodes text, font weight,
    new count (as string), position, markup description, cover pixbuf.
    """
    new_model = gtk.ListStore( gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_INT, gobject.TYPE_STRING, gtk.gdk.Pixbuf)

    for pos, channel in enumerate( channels):
        new_episodes = channel.get_new_episodes( download_status_manager = download_status_manager)
        count = len(channel)
        count_new = len(new_episodes)

        new_iter = new_model.append()
        new_model.set( new_iter, 0, channel.url)
        new_model.set( new_iter, 1, channel.title)

        new_model.set( new_iter, 2, count)
        if count_new == 0:
            new_model.set( new_iter, 3, '')
        elif count_new == 1:
            new_model.set( new_iter, 3, _('New episode: %s') % ( new_episodes[-1].title ) + ' ')
        else:
            new_model.set( new_iter, 3, _('%s new episodes') % count_new + ' ')

        # bold font + counter badge when there is something new
        if count_new:
            new_model.set( new_iter, 4, pango.WEIGHT_BOLD)
            new_model.set( new_iter, 5, str(count_new))
        else:
            new_model.set( new_iter, 4, pango.WEIGHT_NORMAL)
            new_model.set( new_iter, 5, '')

        new_model.set( new_iter, 6, pos)

        new_model.set( new_iter, 7, '%s\n<small>%s</small>' % ( saxutils.escape( channel.title), saxutils.escape( channel.description.split('\n')[0]), ))

        # try the channel's cached cover image first ...
        channel_cover_found = False
        if os.path.exists( channel.cover_file) and os.path.getsize(channel.cover_file) > 0:
            try:
                new_model.set( new_iter, 8, gtk.gdk.pixbuf_new_from_file_at_size( channel.cover_file, 32, 32))
                channel_cover_found = True
            except:
                exctype, value = sys.exc_info()[:2]
                log( 'Could not convert icon file "%s", error was "%s"', channel.cover_file, value )

        # ... and fall back to a generic icon-theme globe icon
        if not channel_cover_found:
            iconsize = gtk.icon_size_from_name('channel-icon')
            if not iconsize:
                iconsize = gtk.icon_size_register('channel-icon',32,32)
            icon_theme = gtk.icon_theme_get_default()
            globe_icon_name = 'applications-internet'
            try:
                new_model.set( new_iter, 8, icon_theme.load_icon(globe_icon_name, iconsize, 0))
            except:
                log( 'Cannot load "%s" icon (using an old or incomplete icon theme?)', globe_icon_name)
                new_model.set( new_iter, 8, None)

    return new_model
|
|
|
|
|