2007-08-29 20:30:26 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2005-11-21 19:21:25 +01:00
|
|
|
#
|
2007-08-29 20:30:26 +02:00
|
|
|
# gPodder - A media aggregator and podcast client
|
2006-12-29 16:52:52 +01:00
|
|
|
# Copyright (C) 2005-2007 Thomas Perl <thp at perli.net>
|
2005-11-21 19:21:25 +01:00
|
|
|
#
|
2007-08-29 20:30:26 +02:00
|
|
|
# gPodder is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
2006-04-07 22:22:30 +02:00
|
|
|
#
|
2007-08-29 20:30:26 +02:00
|
|
|
# gPodder is distributed in the hope that it will be useful,
|
2006-04-07 22:22:30 +02:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2007-08-29 20:30:26 +02:00
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2006-04-07 22:22:30 +02:00
|
|
|
#
|
|
|
|
|
2005-11-21 19:21:25 +01:00
|
|
|
|
|
|
|
#
|
|
|
|
# libpodcasts.py -- data classes for gpodder
|
|
|
|
# thomas perl <thp@perli.net> 20051029
|
|
|
|
#
|
|
|
|
#
|
|
|
|
|
|
|
|
import gtk
|
|
|
|
import gobject
|
2007-07-05 23:07:16 +02:00
|
|
|
import pango
|
2007-08-07 20:11:31 +02:00
|
|
|
|
|
|
|
from gpodder import util
|
2007-08-20 15:45:46 +02:00
|
|
|
from gpodder import opml
|
|
|
|
from gpodder import cache
|
2007-08-24 16:49:41 +02:00
|
|
|
from gpodder import services
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-11-17 15:26:10 +01:00
|
|
|
from liblogger import log
|
2006-02-04 11:37:23 +01:00
|
|
|
import libgpodder
|
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
from os.path import exists
|
2006-03-29 14:41:34 +02:00
|
|
|
from os.path import basename
|
2006-12-06 21:25:26 +01:00
|
|
|
import os.path
|
|
|
|
import os
|
|
|
|
import glob
|
|
|
|
import shutil
|
2007-08-19 09:23:02 +02:00
|
|
|
import sys
|
2007-08-22 01:00:49 +02:00
|
|
|
import urllib
|
|
|
|
import urlparse
|
2007-08-30 20:49:53 +02:00
|
|
|
import time
|
2006-03-03 21:04:25 +01:00
|
|
|
|
2006-04-08 16:40:19 +02:00
|
|
|
from datetime import datetime
|
2006-04-08 09:22:30 +02:00
|
|
|
|
2006-02-04 11:37:23 +01:00
|
|
|
from liblocdbwriter import writeLocalDB
|
|
|
|
from liblocdbreader import readLocalDB
|
|
|
|
|
2007-03-15 22:33:23 +01:00
|
|
|
from libtagupdate import update_metadata_on_file
|
2007-03-17 13:11:13 +01:00
|
|
|
from libtagupdate import tagging_supported
|
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
from threading import Event
|
|
|
|
from libwget import downloadThread
|
2006-02-26 00:13:29 +01:00
|
|
|
import re
|
|
|
|
|
2007-08-20 15:45:46 +02:00
|
|
|
from types import ListType
|
2006-04-10 18:46:50 +02:00
|
|
|
from email.Utils import mktime_tz
|
|
|
|
from email.Utils import parsedate_tz
|
|
|
|
|
2007-07-05 23:07:16 +02:00
|
|
|
from xml.sax import saxutils
|
|
|
|
|
2007-08-20 15:45:46 +02:00
|
|
|
from xml.sax import make_parser
|
|
|
|
|
2006-04-04 00:27:57 +02:00
|
|
|
import md5
|
|
|
|
|
2007-07-19 14:44:12 +02:00
|
|
|
import string
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2007-08-20 15:45:46 +02:00
|
|
|
import shelve
|
|
|
|
|
|
|
|
class ChannelSettings(object):
    """Persistent per-channel settings, stored in a shelve database.

    Settings are plain dictionaries keyed by the channel's feed URL
    (always stored as a UTF-8 byte string).
    """
    # Module-wide settings database, shared by all channels
    storage = shelve.open( libgpodder.gPodderLib().channel_settings_file)

    @classmethod
    def get_settings_by_url( cls, url):
        """Return the stored settings dict for a channel URL (or {})."""
        if isinstance( url, unicode):
            url = url.encode('utf-8')

        log( 'Trying to get settings for %s', url)
        if url in cls.storage:
            return cls.storage[url]
        return {}

    @classmethod
    def set_settings_by_url( cls, url, settings):
        """Store a settings dict for a channel URL and flush it to disk."""
        if isinstance( url, unicode):
            url = url.encode('utf-8')

        log( 'Saving settings for %s', url)
        cls.storage[url] = settings
        cls.storage.sync()
|
|
|
|
|
|
|
|
|
2006-04-08 09:22:30 +02:00
|
|
|
class podcastChannel(ListType):
    """holds data for a complete channel"""
    # Substitution alphabets used by obfuscate_password(): each character in
    # MAP_FROM is replaced by the character at the same index in MAP_TO.
    MAP_FROM = 'abcdefghijklmnopqrstuvwxyz0123456789'
    MAP_TO = 'qazwsxedcrfvtgbyhnujmikolp9514738062'
    # Attribute names that are persisted via ChannelSettings
    SETTINGS = ('sync_to_devices', 'is_music_channel', 'device_playlist_name','override_title','username','password')
    # Shared pixbuf cache for episode status icons (see util.get_tree_icon)
    icon_cache = {}

    # Class-wide feed cache, backed by a shelve file on disk
    storage = shelve.open( libgpodder.gPodderLib().feed_cache_file)
    fc = cache.Cache( storage)
|
|
|
|
|
|
|
|
    @classmethod
    def get_by_url( cls, url, force_update = False):
        """Fetch (or read from cache) the feed at `url` and build a channel.

        Returns a podcastChannel populated with one podcastItem per
        feed entry that carries an enclosure. Entries without
        enclosures, or that fail to parse, are skipped.
        """
        if isinstance( url, unicode):
            url = url.encode('utf-8')

        # feedparser result, served from the shelve-backed cache
        c = cls.fc.fetch( url, force_update)
        channel = podcastChannel( url)
        channel.title = c.feed.title
        # Optional feed-level metadata; only set what the feed provides
        if hasattr( c.feed, 'link'):
            channel.link = c.feed.link
        if hasattr( c.feed, 'subtitle'):
            channel.description = util.remove_html_tags( c.feed.subtitle)

        if hasattr( c.feed, 'updated_parsed'):
            channel.pubDate = util.updated_parsed_to_rfc2822( c.feed.updated_parsed)
        if hasattr( c.feed, 'image'):
            if c.feed.image.href:
                channel.image = c.feed.image.href

        for entry in c.entries:
            # Episodes without an enclosure have nothing to download
            if not hasattr( entry, 'enclosures'):
                log('Skipping entry: %s', entry.get( 'id', '(no id available)'))
                continue

            episode = None

            try:
                episode = podcastItem.from_feedparser_entry( entry, channel)
            except:
                # Broken entry: log and keep going with the rest of the feed
                log( 'Cannot instantiate episode for %s. Skipping.', entry.enclosures[0].href, sender = channel)

            if episode:
                channel.append( episode)

        # Newest episodes first (podcastItem defines __cmp__ by pubDate)
        channel.sort( reverse = True)

        # Flush the feed cache to disk
        cls.storage.sync()
        return channel
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def create_from_dict( d, load_items = True, force_update = False, callback_error = None):
|
|
|
|
if load_items:
|
|
|
|
try:
|
|
|
|
return podcastChannel.get_by_url( d['url'], force_update = force_update)
|
|
|
|
except:
|
|
|
|
callback_error and callback_error( _('Could not load channel feed from URL: %s') % d['url'])
|
|
|
|
log( 'Cannot load podcastChannel from URL: %s', d['url'])
|
|
|
|
|
|
|
|
c = podcastChannel()
|
|
|
|
for key in ( 'url', 'title', 'description' ):
|
|
|
|
if key in d:
|
|
|
|
setattr( c, key, d[key])
|
|
|
|
|
|
|
|
return c
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2005-11-21 19:21:25 +01:00
|
|
|
    def __init__( self, url = "", title = "", link = "", description = ""):
        """Initialize channel metadata and per-channel option defaults."""
        # Feed (RSS/Atom) URL; also the unique key for caches and settings
        self.url = url
        # Goes through the `title` property setter (strips whitespace)
        self.title = title
        self.link = link
        self.description = util.remove_html_tags( description)
        # URL of the channel cover image (None until the feed provides one)
        self.image = None
        # RFC 2822 publication date string ('' when unknown)
        self.pubDate = ''
        self.downloaded = None

        # should this channel be synced to devices? (ex: iPod)
        self.sync_to_devices = True
        # if this is set to true, device syncing (ex: iPod) should treat this as music, not as podcast)
        self.is_music_channel = False
        # to which playlist should be synced when "is_music_channel" is true?
        self.device_playlist_name = 'gPodder'
        # if set, this overrides the channel-provided title
        self.override_title = ''
        # HTTP auth credentials for the feed (empty = no authentication)
        self.username = ''
        self.password = ''

        # Lazily-built gtk.ListStore of episodes (see tree_model property)
        self.__tree_model = None
|
2006-03-03 21:04:25 +01:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
    def get_filename( self):
        """Return the MD5 sum of the channel URL"""
        # Used as a stable, filesystem-safe directory name for this channel
        return md5.new( self.url).hexdigest()

    filename = property(fget=get_filename)
|
2006-08-02 20:24:48 +02:00
|
|
|
|
|
|
|
def get_title( self):
|
2007-03-08 13:11:10 +01:00
|
|
|
if self.override_title:
|
|
|
|
return self.override_title
|
|
|
|
elif not self.__title.strip():
|
|
|
|
return self.url
|
|
|
|
else:
|
|
|
|
return self.__title
|
2006-08-02 20:24:48 +02:00
|
|
|
|
|
|
|
def set_title( self, value):
|
|
|
|
self.__title = value.strip()
|
|
|
|
|
|
|
|
title = property(fget=get_title,
|
|
|
|
fset=set_title)
|
2007-03-08 13:11:10 +01:00
|
|
|
|
|
|
|
def set_custom_title( self, custom_title):
|
|
|
|
custom_title = custom_title.strip()
|
|
|
|
|
|
|
|
if custom_title != self.__title:
|
|
|
|
self.override_title = custom_title
|
|
|
|
else:
|
|
|
|
self.override_title = ''
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-04-07 03:43:06 +02:00
|
|
|
    def get_localdb_channel( self):
        """Read the list of downloaded episodes from index.xml.

        Falls back to an empty channel (metadata only) on any
        read/parse error, e.g. when nothing was downloaded yet.
        """
        try:
            locdb_reader = readLocalDB( self.url)
            locdb_reader.parseXML( self.index_file)
            return locdb_reader.channel
        except:
            return podcastChannel( self.url, self.title, self.link, self.description)

    def set_localdb_channel( self, channel):
        """Persist the list of downloaded episodes to index.xml (best-effort)."""
        if channel != None:
            try:
                log( 'Setting localdb channel data')
                writeLocalDB( self.index_file, channel)
            except:
                log( 'Cannot save channel in set_localdb_channel( %s)', channel.title)

    localdb_channel = property(fget=get_localdb_channel,
                               fset=set_localdb_channel)
|
2006-04-07 03:43:06 +02:00
|
|
|
|
2007-08-20 15:45:46 +02:00
|
|
|
def load_settings( self):
|
|
|
|
settings = ChannelSettings.get_settings_by_url( self.url)
|
2006-04-07 03:43:06 +02:00
|
|
|
|
2007-08-20 15:45:46 +02:00
|
|
|
for key in self.SETTINGS:
|
|
|
|
if settings.has_key( key):
|
|
|
|
setattr( self, key, settings[key])
|
|
|
|
|
|
|
|
def save_settings( self):
|
|
|
|
settings = {}
|
|
|
|
for key in self.SETTINGS:
|
|
|
|
settings[key] = getattr( self, key)
|
2006-04-07 03:43:06 +02:00
|
|
|
|
2007-08-20 15:45:46 +02:00
|
|
|
ChannelSettings.set_settings_by_url( self.url, settings)
|
2006-12-06 21:25:26 +01:00
|
|
|
|
|
|
|
def newest_pubdate_downloaded( self):
|
2006-12-08 21:58:30 +01:00
|
|
|
gl = libgpodder.gPodderLib()
|
|
|
|
|
|
|
|
# Try DownloadHistory's entries first
|
|
|
|
for episode in self:
|
2006-12-13 01:28:26 +01:00
|
|
|
if gl.history_is_downloaded( episode.url):
|
|
|
|
return episode.pubDate
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2006-12-08 21:58:30 +01:00
|
|
|
# If nothing found, do pubDate comparison
|
|
|
|
pubdate = None
|
2006-12-06 21:25:26 +01:00
|
|
|
for episode in self.localdb_channel:
|
|
|
|
pubdate = episode.newer_pubdate( pubdate)
|
|
|
|
return pubdate
|
2007-03-12 12:21:33 +01:00
|
|
|
|
2007-08-24 16:49:41 +02:00
|
|
|
def get_new_episodes( self):
|
2007-07-05 23:07:16 +02:00
|
|
|
last_pubdate = self.newest_pubdate_downloaded()
|
|
|
|
gl = libgpodder.gPodderLib()
|
|
|
|
|
|
|
|
if not last_pubdate:
|
|
|
|
return self[0:min(len(self),gl.default_new)]
|
|
|
|
|
|
|
|
new_episodes = []
|
|
|
|
|
|
|
|
for episode in self.get_all_episodes():
|
|
|
|
# episode is older than newest downloaded
|
|
|
|
if episode.compare_pubdate( last_pubdate) < 0:
|
|
|
|
continue
|
|
|
|
|
|
|
|
# episode has been downloaded before
|
2007-08-22 01:00:49 +02:00
|
|
|
if episode.is_downloaded() or gl.history_is_downloaded( episode.url):
|
2007-07-05 23:07:16 +02:00
|
|
|
continue
|
|
|
|
|
|
|
|
# download is currently in progress
|
2007-08-24 16:49:41 +02:00
|
|
|
if services.download_status_manager.is_download_in_progress( episode.url):
|
2007-07-05 23:07:16 +02:00
|
|
|
continue
|
|
|
|
|
|
|
|
new_episodes.append( episode)
|
|
|
|
|
|
|
|
return new_episodes
|
|
|
|
|
2007-03-12 12:21:33 +01:00
|
|
|
    def can_sort_by_pubdate( self):
        """Return True if every episode's pubDate parses as an RFC 2822 date."""
        for episode in self:
            try:
                # Parse check only; the result is discarded
                mktime_tz(parsedate_tz( episode.pubDate))
            except:
                log('Episode %s has non-parseable pubDate. Sorting disabled.', episode.title)
                return False

        return True
|
2006-04-07 03:43:06 +02:00
|
|
|
|
2006-02-04 11:37:23 +01:00
|
|
|
    def addDownloadedItem( self, item):
        """Record a finished download in the local database.

        Appends `item` to the localdb (unless already present), marks it
        as downloaded in the history, optionally updates the file's tags
        and hands torrent files over to the configured torrent handler.
        Returns True if the item was newly added, False otherwise.
        """
        # no multithreaded access
        libgpodder.getLock()
        localdb = self.index_file
        log( 'Local database: %s', localdb)

        self.downloaded = self.localdb_channel

        already_in_list = False
        # try to find the new item in the list
        for it in self.downloaded:
            if it.equals( item):
                already_in_list = True
                break

        # only append if not already in list
        if not already_in_list:
            self.downloaded.append( item)
            writeLocalDB( localdb, self.downloaded)

            # Update metadata on file (if possible and wanted)
            if libgpodder.gPodderLib().update_tags and tagging_supported():
                filename = item.local_filename()
                try:
                    update_metadata_on_file( filename, title = item.title, artist = self.title)
                except:
                    log('Error while calling update_metadata_on_file() :(')

        # Mark as downloaded even when it was already in the list
        libgpodder.gPodderLib().history_mark_downloaded( item.url)

        # Torrent enclosures: pass the .torrent on to the external handler
        if item.file_type() == 'torrent':
            torrent_filename = item.local_filename()
            destination_filename = util.torrent_filename( torrent_filename)
            libgpodder.gPodderLib().invoke_torrent( item.url, torrent_filename, destination_filename)

        libgpodder.releaseLock()
        return not already_in_list
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2007-04-23 17:18:31 +02:00
|
|
|
    def is_played(self, item):
        """Return True if the episode's URL is marked as played in the history."""
        return libgpodder.gPodderLib().history_is_played( item.url)
|
2005-11-22 14:30:28 +01:00
|
|
|
|
2006-12-09 01:41:58 +01:00
|
|
|
def get_all_episodes( self):
|
|
|
|
episodes = []
|
|
|
|
added_urls = []
|
2007-04-06 20:10:22 +02:00
|
|
|
added_guids = []
|
2006-12-09 01:41:58 +01:00
|
|
|
|
2007-04-06 20:10:22 +02:00
|
|
|
# go through all episodes (both new and downloaded),
|
|
|
|
# prefer already-downloaded (in localdb)
|
|
|
|
for item in [] + self.localdb_channel + self:
|
|
|
|
# skip items with the same guid (if it has a guid)
|
|
|
|
if item.guid and item.guid in added_guids:
|
|
|
|
continue
|
|
|
|
|
|
|
|
# skip items with the same download url
|
|
|
|
if item.url in added_urls:
|
|
|
|
continue
|
|
|
|
|
|
|
|
episodes.append( item)
|
|
|
|
|
|
|
|
added_urls.append( item.url)
|
|
|
|
if item.guid:
|
|
|
|
added_guids.append( item.guid)
|
|
|
|
|
|
|
|
episodes.sort( reverse = True)
|
2006-12-09 01:41:58 +01:00
|
|
|
|
|
|
|
return episodes
|
|
|
|
|
2007-08-20 15:45:46 +02:00
|
|
|
    def force_update_tree_model( self):
        # Drop the cached ListStore so the tree_model property rebuilds it
        self.__tree_model = None
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2007-08-24 16:49:41 +02:00
|
|
|
    def update_model( self):
        """Refresh the status-icon column of every row in the cached model."""
        new_episodes = self.get_new_episodes()

        iter = self.tree_model.get_iter_first()
        while iter != None:
            self.iter_set_downloading_columns( self.tree_model, iter, new_episodes)
            iter = self.tree_model.iter_next( iter)
|
|
|
|
|
|
|
|
    @property
    def tree_model( self):
        # Lazily create and cache the gtk.ListStore of this channel's episodes
        if not self.__tree_model:
            log('Generating TreeModel for %s', self.url, sender = self)
            self.__tree_model = self.items_liststore()

        return self.__tree_model
|
|
|
|
|
2007-08-24 16:49:41 +02:00
|
|
|
    # NOTE(review): the mutable default `new_episodes = []` is only ever read,
    # never mutated, so it is harmless here — but a None sentinel would be safer.
    def iter_set_downloading_columns( self, model, iter, new_episodes = []):
        """Set the status icon (column 4) of one row according to episode state."""
        url = model.get_value( iter, 0)
        # Column 8 holds the expected local filename (see items_liststore)
        local_filename = model.get_value( iter, 8)
        played = not libgpodder.gPodderLib().history_is_played( url)

        if os.path.exists( local_filename):
            # Downloaded: pick an icon matching the file type
            file_type = util.file_type_by_extension( util.file_extension_from_url( url))
            if file_type == 'audio':
                status_icon = util.get_tree_icon( 'audio-x-generic', played, self.icon_cache)
            elif file_type == 'video':
                status_icon = util.get_tree_icon( 'video-x-generic', played, self.icon_cache)
            elif file_type == 'torrent':
                status_icon = util.get_tree_icon( 'applications-internet', played, self.icon_cache)
            else:
                status_icon = util.get_tree_icon( 'unknown', played, self.icon_cache)
        elif services.download_status_manager.is_download_in_progress( url):
            # Currently downloading
            status_icon = util.get_tree_icon( gtk.STOCK_GO_DOWN, icon_cache = self.icon_cache)
        elif libgpodder.gPodderLib().history_is_downloaded( url):
            # Was downloaded before, but the file is gone (deleted)
            status_icon = util.get_tree_icon( gtk.STOCK_DELETE, icon_cache = self.icon_cache)
        elif url in [ e.url for e in new_episodes ]:
            # Not downloaded yet and considered "new"
            status_icon = util.get_tree_icon( gtk.STOCK_NEW, icon_cache = self.icon_cache)
        else:
            status_icon = None

        model.set( iter, 4, status_icon)
|
|
|
|
|
2007-08-24 16:49:41 +02:00
|
|
|
    def items_liststore( self):
        """
        Return a gtk.ListStore containing episodes for this channel
        """
        # Columns: 0=url, 1=title, 2=formatted size, 3=bool (unused flag),
        # 4=status icon pixbuf, 5=cute pubdate, 6=one-line description,
        # 7=full description, 8=local filename
        new_model = gtk.ListStore( gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN, gtk.gdk.Pixbuf, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
        new_episodes = self.get_new_episodes()

        for item in self.get_all_episodes():
            new_iter = new_model.append( ( item.url, item.title, util.format_filesize( item.length), True, None, item.cute_pubdate(), item.one_line_description(), item.description, item.local_filename() ))
            # Fill in the status icon for the freshly-added row
            self.iter_set_downloading_columns( new_model, new_iter, new_episodes)

        return new_model
|
|
|
|
|
2006-12-09 01:41:58 +01:00
|
|
|
def find_episode( self, url):
|
|
|
|
for item in self.get_all_episodes():
|
|
|
|
if url == item.url:
|
|
|
|
return item
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-12-09 01:41:58 +01:00
|
|
|
return None
|
2005-11-21 19:21:25 +01:00
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
    def get_save_dir(self):
        """Return the channel's download directory, creating it if needed."""
        # Layout: <downloaddir>/<md5 of channel url>/ (trailing separator kept)
        save_dir = os.path.join( libgpodder.gPodderLib().downloaddir, self.filename, '')

        # Create save_dir if it does not yet exist
        if not util.make_directory( save_dir):
            log( 'Could not create save_dir: %s', save_dir, sender = self)

        return save_dir

    save_dir = property(fget=get_save_dir)
|
2006-03-03 21:04:25 +01:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
    def remove_downloaded( self):
        # Recursively delete the channel's download directory (errors ignored)
        shutil.rmtree( self.save_dir, True)
|
2006-03-03 21:04:25 +01:00
|
|
|
|
|
|
|
    def get_index_file(self):
        """Return the path of the local database of downloaded episodes."""
        # gets index xml filename for downloaded channels list
        return os.path.join( self.save_dir, 'index.xml')

    index_file = property(fget=get_index_file)
|
2006-03-29 14:41:34 +02:00
|
|
|
|
2006-03-31 18:20:18 +02:00
|
|
|
    def get_cover_file( self):
        """Return the path of the cached channel cover image."""
        # gets cover filename for cover download cache
        return os.path.join( self.save_dir, 'cover')

    cover_file = property(fget=get_cover_file)
|
2007-04-03 08:27:46 +02:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
def delete_episode_by_url(self, url):
|
|
|
|
log( 'Delete %s', url)
|
2006-03-24 20:08:59 +01:00
|
|
|
# no multithreaded access
|
|
|
|
libgpodder.getLock()
|
2006-12-06 21:25:26 +01:00
|
|
|
|
|
|
|
new_localdb = self.localdb_channel
|
|
|
|
|
|
|
|
for item in new_localdb:
|
|
|
|
if item.url == url:
|
2007-08-30 20:49:53 +02:00
|
|
|
local_filename = item.local_filename()
|
2006-12-06 21:25:26 +01:00
|
|
|
new_localdb.remove(item)
|
|
|
|
|
|
|
|
self.localdb_channel = new_localdb
|
|
|
|
|
2007-03-14 20:35:15 +01:00
|
|
|
# clean-up downloaded file
|
2007-08-30 20:49:53 +02:00
|
|
|
util.delete_file( local_filename)
|
2007-03-14 20:35:15 +01:00
|
|
|
|
2006-03-24 20:08:59 +01:00
|
|
|
libgpodder.releaseLock()
|
|
|
|
|
2007-07-19 14:44:12 +02:00
|
|
|
    def obfuscate_password(self, password, unobfuscate = False):
        """Reversibly scramble a stored password via letter substitution.

        Uses the MAP_FROM/MAP_TO class alphabets; unobfuscate=True
        applies the inverse mapping. This is obfuscation, not
        encryption.
        """
        if unobfuscate:
            translation_table = string.maketrans(self.MAP_TO + self.MAP_TO.upper(), self.MAP_FROM + self.MAP_FROM.upper())
        else:
            translation_table = string.maketrans(self.MAP_FROM + self.MAP_FROM.upper(), self.MAP_TO + self.MAP_TO.upper())
        try:
            # For now at least, only ascii passwords will work, non-ascii passwords will be stored in plaintext :-(
            return string.translate(password.encode('ascii'), translation_table)
        except:
            return password
|
|
|
|
|
2006-03-03 21:04:25 +01:00
|
|
|
class podcastItem(object):
    """holds data for one object in a channel"""

    @staticmethod
    def from_feedparser_entry( entry, channel):
        """Build a podcastItem from a feedparser entry.

        The caller must ensure the entry has at least one enclosure;
        only the first enclosure is used.
        """
        episode = podcastItem( channel)

        # Fall back to the first line of the summary when there is no title
        episode.title = entry.get( 'title', util.get_first_line( util.remove_html_tags( entry.get( 'summary', ''))))
        episode.link = entry.get( 'link', '')
        episode.description = util.remove_html_tags( entry.get( 'summary', entry.get( 'link', entry.get( 'title', ''))))
        episode.guid = entry.get( 'id', '')
        if entry.get( 'updated_parsed', None):
            episode.pubDate = util.updated_parsed_to_rfc2822( entry.updated_parsed)

        if episode.title == '':
            log( 'Warning: Episode has no title, adding anyways.. (Feed Is Buggy!)', sender = episode)

        if len(entry.enclosures) > 1:
            log( 'Warning: More than one enclosure found in feed, only using first', sender = episode)

        # The enclosure carries the actual media URL and (optionally) size/type
        enclosure = entry.enclosures[0]
        episode.url = enclosure.href
        if hasattr( enclosure, 'length'):
            episode.length = enclosure.length
        if hasattr( enclosure, 'type'):
            episode.mimetype = enclosure.type

        # Last-resort title: the enclosure's filename without extension
        if episode.title == '':
            ( filename, extension ) = os.path.splitext( os.path.basename( episode.url))
            episode.title = filename

        return episode
|
|
|
|
|
|
|
|
|
2007-08-22 01:00:49 +02:00
|
|
|
    def __init__( self, channel):
        """Initialize an empty episode belonging to `channel`."""
        # Download (enclosure) URL; unique key for this episode
        self.url = ''
        self.title = ''
        # File size as reported by the feed (may be a numeric string)
        self.length = 0
        self.mimetype = 'application/octet-stream'
        self.guid = ''
        self.description = ''
        self.link = ''
        # Back-reference to the owning podcastChannel
        self.channel = channel
        # RFC 2822 publication date string ('' when unknown)
        self.pubDate = ''
|
2006-11-20 12:51:20 +01:00
|
|
|
|
|
|
|
def one_line_description( self):
|
|
|
|
lines = self.description.strip().splitlines()
|
|
|
|
if not lines or lines[0] == '':
|
|
|
|
return _('No description available')
|
|
|
|
else:
|
|
|
|
desc = lines[0].strip()
|
2006-11-22 16:15:32 +01:00
|
|
|
if len( desc) > 84:
|
2006-11-20 12:51:20 +01:00
|
|
|
return desc[:80] + '...'
|
|
|
|
else:
|
|
|
|
return desc
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2007-08-22 01:00:49 +02:00
|
|
|
    def is_downloaded( self):
        """Return True if the episode's local file exists on disk."""
        return os.path.exists( self.local_filename())

    def local_filename( self):
        """Return the episode's local path: <channel dir>/<md5 of url><ext>."""
        extension = util.file_extension_from_url( self.url)
        return os.path.join( self.channel.save_dir, md5.new( self.url).hexdigest() + extension)

    def file_type( self):
        """Return a coarse file type ('audio', 'video', 'torrent', ...) from the URL."""
        return util.file_type_by_extension( util.file_extension_from_url( self.url))
|
|
|
|
|
2007-03-12 12:21:33 +01:00
|
|
|
def __cmp__( self, other):
|
|
|
|
try:
|
|
|
|
timestamp_self = int(mktime_tz( parsedate_tz( self.pubDate)))
|
|
|
|
timestamp_other = int(mktime_tz( parsedate_tz( other.pubDate)))
|
|
|
|
except:
|
2007-03-14 20:35:15 +01:00
|
|
|
# by default, do as if this is not the same
|
|
|
|
# this is here so that comparisons with None
|
|
|
|
# can be allowed (item != None -> True)
|
|
|
|
return -1
|
2007-03-12 12:21:33 +01:00
|
|
|
|
|
|
|
return timestamp_self - timestamp_other
|
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
def compare_pubdate( self, pubdate):
|
|
|
|
try:
|
|
|
|
timestamp_self = int(mktime_tz( parsedate_tz( self.pubDate)))
|
|
|
|
except:
|
|
|
|
return -1
|
|
|
|
|
|
|
|
try:
|
|
|
|
timestamp_other = int(mktime_tz( parsedate_tz( pubdate)))
|
|
|
|
except:
|
|
|
|
return 1
|
|
|
|
|
|
|
|
return timestamp_self - timestamp_other
|
|
|
|
|
|
|
|
def newer_pubdate( self, pubdate = None):
|
|
|
|
if self.compare_pubdate( pubdate) > 0:
|
|
|
|
return self.pubDate
|
|
|
|
else:
|
|
|
|
return pubdate
|
2007-08-19 15:01:15 +02:00
|
|
|
|
2006-04-10 18:46:50 +02:00
|
|
|
def cute_pubdate( self):
|
|
|
|
seconds_in_a_day = 86400
|
|
|
|
try:
|
|
|
|
timestamp = int(mktime_tz( parsedate_tz( self.pubDate)))
|
|
|
|
except:
|
|
|
|
return _("(unknown)")
|
2007-08-30 20:49:53 +02:00
|
|
|
diff = int((time.time()+1)/seconds_in_a_day) - int(timestamp/seconds_in_a_day)
|
2006-04-10 18:46:50 +02:00
|
|
|
|
|
|
|
if diff == 0:
|
|
|
|
return _("Today")
|
|
|
|
if diff == 1:
|
|
|
|
return _("Yesterday")
|
|
|
|
if diff < 7:
|
|
|
|
return str(datetime.fromtimestamp( timestamp).strftime( "%A"))
|
|
|
|
|
|
|
|
return str(datetime.fromtimestamp( timestamp).strftime( "%x"))
|
2006-12-09 01:41:58 +01:00
|
|
|
|
2007-08-22 01:00:49 +02:00
|
|
|
    def calculate_filesize( self):
        """Update self.length from the downloaded file's size (best-effort)."""
        try:
            # Stored as a string, matching the feed-provided length format
            self.length = str(os.path.getsize( self.local_filename()))
        except:
            log( 'Could not get filesize for %s.', self.url)
|
2006-04-10 18:46:50 +02:00
|
|
|
|
2006-04-03 21:43:59 +02:00
|
|
|
def equals( self, other_item):
|
|
|
|
if other_item == None:
|
|
|
|
return False
|
|
|
|
|
|
|
|
return self.url == other_item.url
|
2006-08-02 20:24:48 +02:00
|
|
|
|
2006-06-13 23:00:31 +02:00
|
|
|
|
2007-04-03 13:21:12 +02:00
|
|
|
|
2007-08-24 16:49:41 +02:00
|
|
|
def channelsToModel( channels):
    """Build the gtk.ListStore backing the channel list view.

    Columns: 0=url, 1=title, 2=episode count, 3=new-episode text,
    4=pango weight, 5=new-episode count text, 6=position, 7=markup
    (title + first description line), 8=cover pixbuf.
    """
    new_model = gtk.ListStore( gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_INT, gobject.TYPE_STRING, gtk.gdk.Pixbuf)
    pos = 0

    for channel in channels:
        new_episodes = channel.get_new_episodes()
        count = len(channel)
        count_new = len(new_episodes)

        new_iter = new_model.append()
        new_model.set( new_iter, 0, channel.url)
        new_model.set( new_iter, 1, channel.title)

        new_model.set( new_iter, 2, count)
        # Human-readable "new episodes" hint (trailing space for layout)
        if count_new == 0:
            new_model.set( new_iter, 3, '')
        elif count_new == 1:
            new_model.set( new_iter, 3, _('New episode: %s') % ( new_episodes[-1].title ) + ' ')
        else:
            new_model.set( new_iter, 3, _('%s new episodes') % count_new + ' ')

        # Bold title while there are new episodes
        if count_new:
            new_model.set( new_iter, 4, pango.WEIGHT_BOLD)
            new_model.set( new_iter, 5, str(count_new))
        else:
            new_model.set( new_iter, 4, pango.WEIGHT_NORMAL)
            new_model.set( new_iter, 5, '')

        new_model.set( new_iter, 6, pos)

        # Pango markup: escaped title plus first line of the description
        new_model.set( new_iter, 7, '%s\n<small>%s</small>' % ( saxutils.escape( channel.title), saxutils.escape( channel.description.split('\n')[0]), ))

        # Try the cached cover image first, fall back to a themed globe icon
        channel_cover_found = False
        if os.path.exists( channel.cover_file) and os.path.getsize(channel.cover_file) > 0:
            try:
                new_model.set( new_iter, 8, gtk.gdk.pixbuf_new_from_file_at_size( channel.cover_file, 32, 32))
                channel_cover_found = True
            except:
                exctype, value = sys.exc_info()[:2]
                log( 'Could not convert icon file "%s", error was "%s"', channel.cover_file, value )

        if not channel_cover_found:
            iconsize = gtk.icon_size_from_name('channel-icon')
            if not iconsize:
                iconsize = gtk.icon_size_register('channel-icon',32,32)
            icon_theme = gtk.icon_theme_get_default()
            globe_icon_name = 'applications-internet'
            try:
                new_model.set( new_iter, 8, icon_theme.load_icon(globe_icon_name, iconsize, 0))
            except:
                log( 'Cannot load "%s" icon (using an old or incomplete icon theme?)', globe_icon_name)
                new_model.set( new_iter, 8, None)

        pos = pos + 1

    return new_model
|
|
|
|
|
2007-08-20 15:45:46 +02:00
|
|
|
|
|
|
|
|
|
|
|
def load_channels( load_items = True, force_update = False, callback_proc = None, callback_url = None, callback_error = None):
    """Load all subscribed channels from the OPML subscription list.

    Progress/URL callbacks, when given, are invoked before each
    channel is processed; callback_error is passed on to
    create_from_dict for feed-load failures.
    """
    importer = opml.Importer( libgpodder.gPodderLib().channel_opml_file)
    total = len( importer.items)

    result = []
    for position, item in enumerate( importer.items):
        callback_proc and callback_proc( position, total)
        callback_url and callback_url( item['url'])
        result.append( podcastChannel.create_from_dict( item, load_items = load_items, force_update = force_update, callback_error = callback_error))

    return result
|
|
|
|
|
|
|
|
def save_channels( channels):
    """Write the list of channels back to the OPML subscription list."""
    exporter = opml.Exporter( libgpodder.gPodderLib().channel_opml_file)
    exporter.write( channels)
|
|
|
|
|
|
|
|
|