# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2009 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

#
#  libpodcasts.py -- data classes for gpodder
#  thomas perl <thp@perli.net>   20051029
#
#  Contains code based on:
#            liblocdbwriter.py (2006-01-09)
#            liblocdbreader.py (2006-01-10)
#
# Standard library
import datetime
import glob
import hashlib
import os
import os.path
import rfc822
import shutil
import sys
import time
import urllib
import urlparse
import xml.dom.minidom
from xml.sax import saxutils

# Third-party
import gtk
import gobject
import pango
import feedparser

# gPodder modules (relative order preserved; some have import-time side effects)
import gpodder
from gpodder import util
from gpodder import opml
from gpodder import cache
from gpodder import services
from gpodder import draw
from gpodder import libtagupdate
from gpodder import dumbshelve
from gpodder import resolver
from gpodder.liblogger import log
from gpodder.libgpodder import gl
from gpodder.dbsqlite import db
2007-08-20 15:45:46 +02:00
2008-04-22 21:57:45 +02:00
if gpodder . interface == gpodder . MAEMO :
2008-04-06 02:19:03 +02:00
ICON_AUDIO_FILE = ' gnome-mime-audio-mp3 '
ICON_VIDEO_FILE = ' gnome-mime-video-mp4 '
ICON_DOWNLOADING = ' qgn_toolb_messagin_moveto '
ICON_DELETED = ' qgn_toolb_gene_deletebutton '
ICON_NEW = ' qgn_list_gene_favor '
2008-04-22 21:57:02 +02:00
else :
ICON_AUDIO_FILE = ' audio-x-generic '
ICON_VIDEO_FILE = ' video-x-generic '
ICON_DOWNLOADING = gtk . STOCK_GO_DOWN
ICON_DELETED = gtk . STOCK_DELETE
2008-06-30 03:10:18 +02:00
ICON_NEW = gtk . STOCK_ABOUT
2008-03-02 13:56:16 +01:00
2008-10-20 06:17:22 +02:00
class HTTPAuthError ( Exception ) : pass
2008-03-02 13:56:16 +01:00
2009-03-10 14:59:01 +01:00
class PodcastModelObject ( object ) :
"""
A generic base class for our podcast model providing common helper
and utility functions .
"""
@classmethod
def create_from_dict ( cls , d , * args ) :
"""
Create a new object , passing " args " to the constructor
and then updating the object with the values from " d " .
"""
o = cls ( * args )
o . update_from_dict ( d )
return o
def update_from_dict ( self , d ) :
"""
Updates the attributes of this object with values from the
dictionary " d " by using the keys found in " d " .
"""
for k in d :
if hasattr ( self , k ) :
setattr ( self , k , d [ k ] )
class PodcastChannel ( PodcastModelObject ) :
2006-03-03 21:04:25 +01:00
""" holds data for a complete channel """
2009-02-06 15:54:28 +01:00
MAX_FOLDERNAME_LENGTH = 150
2007-08-25 08:11:19 +02:00
icon_cache = { }
2007-08-20 15:45:46 +02:00
2008-06-30 03:10:18 +02:00
fc = cache . Cache ( )
2007-08-20 15:45:46 +02:00
2007-11-12 20:29:53 +01:00
@classmethod
2008-10-20 06:17:22 +02:00
def load ( cls , url , create = True , authentication_tokens = None ) :
2008-06-30 03:10:18 +02:00
if isinstance ( url , unicode ) :
url = url . encode ( ' utf-8 ' )
2007-11-12 20:29:53 +01:00
2009-03-10 14:59:01 +01:00
tmp = db . load_channels ( factory = cls . create_from_dict , url = url )
2008-06-30 03:10:18 +02:00
if len ( tmp ) :
return tmp [ 0 ]
elif create :
2009-03-10 14:59:01 +01:00
tmp = PodcastChannel ( url )
2008-10-20 06:17:22 +02:00
if authentication_tokens is not None :
tmp . username = authentication_tokens [ 0 ]
tmp . password = authentication_tokens [ 1 ]
success , error_code = tmp . update ( )
if not success :
if error_code == 401 :
raise HTTPAuthError
else :
return None
2008-06-30 03:10:18 +02:00
tmp . save ( )
db . force_last_new ( tmp )
return tmp
2008-05-10 13:43:43 +02:00
2009-03-10 14:59:01 +01:00
def episode_factory ( self , d ) :
"""
This function takes a dictionary containing key - value pairs for
episodes and returns a new PodcastEpisode object that is connected
to this PodcastChannel object .
Returns : A new PodcastEpisode object
"""
return PodcastEpisode . create_from_dict ( d , self )
2008-06-30 03:10:18 +02:00
def update ( self ) :
( updated , c ) = self . fc . fetch ( self . url , self )
2007-08-20 15:45:46 +02:00
2008-09-30 22:07:06 +02:00
if c is None :
2008-10-20 06:17:22 +02:00
return ( False , None )
if c . status == 401 :
return ( False , 401 )
2008-09-30 22:07:06 +02:00
2009-03-05 10:20:13 +01:00
if c . status == 302 and hasattr ( c , ' headers ' ) and c . headers . get ( ' content-type ' ) . startswith ( ' text/html ' ) :
log ( ' Warning: Looks like a Wifi authentication page: %s ' , c . url , sender = self )
log ( ' Acting as if the feed was not updated (FIXME!) ' , sender = self )
return ( True , None )
2009-02-18 13:36:33 +01:00
if self . url != c . url and c . status != 302 :
# The URL has changed, and the status code is not a temporary
# redirect, so update the channel's URL accordingly for future use
log ( ' Updating channel URL from %s to %s ' ,
self . url , c . url , sender = self )
2008-09-30 22:07:06 +02:00
self . url = c . url
2008-07-20 02:46:49 +02:00
# update the cover if it's not there
self . update_cover ( )
2008-05-10 13:43:43 +02:00
# If we have an old instance of this channel, and
# feedcache says the feed hasn't changed, return old
2008-06-30 03:10:18 +02:00
if not updated :
log ( ' Channel %s is up to date ' , self . url )
2008-10-20 06:17:22 +02:00
return ( True , None )
2008-06-30 03:10:18 +02:00
# Save etag and last-modified for later reuse
if c . headers . get ( ' etag ' ) :
self . etag = c . headers . get ( ' etag ' )
if c . headers . get ( ' last-modified ' ) :
self . last_modified = c . headers . get ( ' last-modified ' )
self . parse_error = c . get ( ' bozo_exception ' , None )
2008-05-10 13:43:43 +02:00
2008-03-29 16:33:18 +01:00
if hasattr ( c . feed , ' title ' ) :
2008-06-30 03:10:18 +02:00
self . title = c . feed . title
2008-12-04 21:17:43 +01:00
# Start YouTube-specific title FIX
2009-01-26 12:37:21 +01:00
YOUTUBE_PREFIX = ' Videos uploaded by '
2008-12-04 21:17:43 +01:00
if self . title . startswith ( YOUTUBE_PREFIX ) :
self . title = self . title [ len ( YOUTUBE_PREFIX ) : ] + ' on YouTube '
# End YouTube-specific title FIX
2008-03-29 16:33:18 +01:00
else :
2008-06-30 03:10:18 +02:00
self . title = self . url
2007-08-20 15:45:46 +02:00
if hasattr ( c . feed , ' link ' ) :
2008-06-30 03:10:18 +02:00
self . link = c . feed . link
2007-08-20 15:45:46 +02:00
if hasattr ( c . feed , ' subtitle ' ) :
2008-08-03 21:09:03 +02:00
self . description = c . feed . subtitle
2007-08-20 15:45:46 +02:00
2008-03-29 01:22:39 +01:00
if hasattr ( c . feed , ' updated_parsed ' ) and c . feed . updated_parsed is not None :
2008-07-14 18:46:59 +02:00
self . pubDate = rfc822 . mktime_tz ( c . feed . updated_parsed + ( 0 , ) )
2008-06-30 03:10:18 +02:00
else :
self . pubDate = time . time ( )
2007-08-20 15:45:46 +02:00
if hasattr ( c . feed , ' image ' ) :
2008-08-17 15:16:24 +02:00
if hasattr ( c . feed . image , ' href ' ) and c . feed . image . href :
2008-07-20 02:46:49 +02:00
old = self . image
2008-06-30 03:10:18 +02:00
self . image = c . feed . image . href
2008-07-20 02:46:49 +02:00
if old != self . image :
self . update_cover ( force = True )
2008-06-30 03:10:18 +02:00
# Marked as bulk because we commit after importing episodes.
db . save_channel ( self , bulk = True )
2007-08-20 15:45:46 +02:00
2008-10-06 21:54:05 +02:00
# Remove old episodes before adding the new ones. This helps
# deal with hyperactive channels, such as TV news, when there
# can be more new episodes than the user wants in the list.
# By cleaning up old episodes before receiving the new ones we
# ensure that the user doesn't miss any.
db . purge ( gl . config . max_episodes_per_feed , self . id )
2008-10-06 22:07:38 +02:00
# Load all episodes to update them properly.
existing = self . get_all_episodes ( )
2008-03-20 11:20:41 +01:00
# We can limit the maximum number of entries that gPodder will parse
# via the "max_episodes_per_feed" configuration option.
if len ( c . entries ) > gl . config . max_episodes_per_feed :
2008-06-30 03:10:18 +02:00
log ( ' Limiting number of episodes for %s to %d ' , self . title , gl . config . max_episodes_per_feed )
2008-03-20 11:20:41 +01:00
for entry in c . entries [ : min ( gl . config . max_episodes_per_feed , len ( c . entries ) ) ] :
2007-08-25 17:40:18 +02:00
episode = None
try :
2009-03-10 14:59:01 +01:00
episode = PodcastEpisode . from_feedparser_entry ( entry , self )
2008-06-30 03:10:18 +02:00
except Exception , e :
log ( ' Cannot instantiate episode " %s " : %s . Skipping. ' , entry . get ( ' id ' , ' (no id available) ' ) , e , sender = self , traceback = True )
2007-08-25 17:40:18 +02:00
2007-08-20 15:45:46 +02:00
if episode :
2008-10-06 22:07:38 +02:00
self . count_new + = 1
for ex in existing :
if ex . guid == episode . guid :
2008-10-13 17:22:24 +02:00
for k in ( ' title ' , ' title ' , ' description ' , ' link ' , ' pubDate ' ) :
2008-10-06 22:07:38 +02:00
setattr ( ex , k , getattr ( episode , k ) )
self . count_new - = 1
episode = ex
2008-06-30 03:10:18 +02:00
episode . save ( bulk = True )
2007-08-25 08:11:19 +02:00
2009-01-14 21:11:27 +01:00
db . commit ( )
2008-10-20 06:17:22 +02:00
return ( True , None )
2007-08-20 15:45:46 +02:00
2008-07-20 02:46:49 +02:00
def update_cover ( self , force = False ) :
if self . cover_file is None or not os . path . exists ( self . cover_file ) or force :
if self . image is not None :
services . cover_downloader . request_cover ( self )
2008-06-30 03:10:18 +02:00
def delete ( self ) :
db . delete_channel ( self )
2007-08-20 15:45:46 +02:00
2008-06-30 03:10:18 +02:00
def save ( self ) :
db . save_channel ( self )
2007-08-20 15:45:46 +02:00
2008-06-30 03:10:18 +02:00
def stat ( self , state = None , is_played = None , is_locked = None ) :
return db . get_channel_stat ( self . url , state = state , is_played = is_played , is_locked = is_locked )
2006-12-06 21:25:26 +01:00
2005-11-21 19:21:25 +01:00
def __init__ ( self , url = " " , title = " " , link = " " , description = " " ) :
2008-06-30 03:10:18 +02:00
self . id = None
2005-11-21 19:21:25 +01:00
self . url = url
self . title = title
self . link = link
2008-08-03 21:09:03 +02:00
self . description = description
2006-03-03 21:04:25 +01:00
self . image = None
2008-06-14 18:53:16 +02:00
self . pubDate = 0
2008-03-29 16:33:18 +01:00
self . parse_error = None
2008-06-13 14:30:42 +02:00
self . newest_pubdate_cached = None
2008-08-30 19:23:04 +02:00
self . update_flag = False # channel is updating or to be updated
self . iter = None
2009-02-06 15:54:28 +01:00
self . foldername = None
self . auto_foldername = 1 # automatically generated foldername
2007-08-20 15:45:46 +02:00
2006-04-07 03:43:06 +02:00
# should this channel be synced to devices? (ex: iPod)
self . sync_to_devices = True
2008-04-22 21:57:02 +02:00
# to which playlist should be synced
2006-04-08 11:09:15 +02:00
self . device_playlist_name = ' gPodder '
2007-03-08 13:11:10 +01:00
# if set, this overrides the channel-provided title
self . override_title = ' '
2007-07-19 14:44:12 +02:00
self . username = ' '
self . password = ' '
2007-08-20 15:45:46 +02:00
2008-06-30 03:10:18 +02:00
self . last_modified = None
self . etag = None
2008-03-20 11:17:31 +01:00
self . save_dir_size = 0
2008-06-05 18:17:09 +02:00
self . __save_dir_size_set = False
2007-11-14 21:57:31 +01:00
2008-10-14 18:54:04 +02:00
self . count_downloaded = 0
self . count_new = 0
self . count_unplayed = 0
2008-11-19 17:05:19 +01:00
self . channel_is_locked = False
2008-06-05 18:17:09 +02:00
def request_save_dir_size ( self ) :
if not self . __save_dir_size_set :
self . update_save_dir_size ( )
self . __save_dir_size_set = True
2007-11-14 21:57:31 +01:00
def update_save_dir_size ( self ) :
self . save_dir_size = util . calculate_size ( self . save_dir )
2006-08-02 20:24:48 +02:00
def get_title ( self ) :
2007-03-08 13:11:10 +01:00
if self . override_title :
return self . override_title
elif not self . __title . strip ( ) :
return self . url
else :
return self . __title
2006-08-02 20:24:48 +02:00
def set_title ( self , value ) :
self . __title = value . strip ( )
title = property ( fget = get_title ,
fset = set_title )
2007-03-08 13:11:10 +01:00
def set_custom_title ( self , custom_title ) :
custom_title = custom_title . strip ( )
2009-02-06 15:54:28 +01:00
# make sure self.foldername is initialized
self . get_save_dir ( )
# rename folder if custom_title looks sane
new_folder_name = self . find_unique_folder_name ( custom_title )
if len ( new_folder_name ) > 0 and new_folder_name != self . foldername :
log ( ' Changing foldername based on custom title: %s ' , custom_title , sender = self )
new_folder = os . path . join ( gl . downloaddir , new_folder_name )
old_folder = os . path . join ( gl . downloaddir , self . foldername )
if os . path . exists ( old_folder ) :
if not os . path . exists ( new_folder ) :
# Old folder exists, new folder does not -> simply rename
log ( ' Renaming %s => %s ' , old_folder , new_folder , sender = self )
os . rename ( old_folder , new_folder )
else :
# Both folders exist -> move files and delete old folder
log ( ' Moving files from %s to %s ' , old_folder , new_folder , sender = self )
for file in glob . glob ( os . path . join ( old_folder , ' * ' ) ) :
shutil . move ( file , new_folder )
log ( ' Removing %s ' , old_folder , sender = self )
shutil . rmtree ( old_folder , ignore_errors = True )
self . foldername = new_folder_name
self . save ( )
2007-03-08 13:11:10 +01:00
if custom_title != self . __title :
self . override_title = custom_title
else :
self . override_title = ' '
2006-04-07 03:43:06 +02:00
2008-06-30 03:10:18 +02:00
def get_downloaded_episodes ( self ) :
2009-03-10 14:59:01 +01:00
return db . load_episodes ( self , factory = self . episode_factory , state = db . STATE_DOWNLOADED )
2007-11-27 23:04:15 +01:00
2007-08-24 16:49:41 +02:00
def get_new_episodes ( self ) :
2009-04-01 01:12:17 +02:00
return [ episode for episode in db . load_episodes ( self , factory = self . episode_factory ) if episode . state == db . STATE_NORMAL and not episode . is_played ] # and not services.download_status_manager.is_download_in_progress(episode.url)]
2007-07-05 23:07:16 +02:00
2008-06-30 03:10:18 +02:00
def update_m3u_playlist ( self ) :
2008-03-10 16:50:12 +01:00
if gl . config . create_m3u_playlists :
2008-06-30 03:10:18 +02:00
downloaded_episodes = self . get_downloaded_episodes ( )
2008-03-10 16:50:12 +01:00
fn = util . sanitize_filename ( self . title )
if len ( fn ) == 0 :
fn = os . path . basename ( self . save_dir )
m3u_filename = os . path . join ( gl . downloaddir , fn + ' .m3u ' )
log ( ' Writing playlist to %s ' , m3u_filename , sender = self )
f = open ( m3u_filename , ' w ' )
f . write ( ' #EXTM3U \n ' )
2008-06-30 03:10:18 +02:00
2009-03-30 21:53:01 +02:00
# Check to see if we need to reverse the playlist order
if gl . config . reverse_m3u_playlist_order :
episodes_m3u = reversed ( downloaded_episodes )
else :
episodes_m3u = downloaded_episodes
for episode in episodes_m3u :
2009-02-09 23:26:47 +01:00
if episode . was_downloaded ( and_exists = True ) :
filename = episode . local_filename ( create = False )
assert filename is not None
if os . path . dirname ( filename ) . startswith ( os . path . dirname ( m3u_filename ) ) :
filename = filename [ len ( os . path . dirname ( m3u_filename ) + os . sep ) : ]
f . write ( ' #EXTINF:0, ' + self . title + ' - ' + episode . title + ' ( ' + episode . cute_pubdate ( ) + ' ) \n ' )
f . write ( filename + ' \n ' )
2008-03-10 16:50:12 +01:00
f . close ( )
2007-03-15 22:33:23 +01:00
2008-06-30 03:10:18 +02:00
def addDownloadedItem ( self , item ) :
log ( ' addDownloadedItem( %s ) ' , item . url )
if not item . was_downloaded ( ) :
2008-10-13 15:26:27 +02:00
item . mark_downloaded ( save = True )
2006-12-08 21:58:30 +01:00
2007-03-15 22:33:23 +01:00
# Update metadata on file (if possible and wanted)
2008-04-22 21:57:02 +02:00
if gl . config . update_tags and libtagupdate . tagging_supported ( ) :
2009-02-09 23:26:47 +01:00
filename = item . local_filename ( create = False )
assert filename is not None
2007-03-15 22:33:23 +01:00
try :
2008-10-07 19:39:37 +02:00
libtagupdate . update_metadata_on_file ( filename , title = item . title , artist = self . title , genre = ' Podcast ' )
2008-06-30 03:10:18 +02:00
except Exception , e :
log ( ' Error while calling update_metadata_on_file(): %s ' , e )
2008-06-13 14:30:42 +02:00
2008-06-30 03:10:18 +02:00
self . update_m3u_playlist ( )
2007-11-27 23:04:15 +01:00
2008-06-30 03:10:18 +02:00
def get_all_episodes ( self ) :
2009-03-10 14:59:01 +01:00
return db . load_episodes ( self , factory = self . episode_factory )
2007-11-27 23:04:15 +01:00
2009-04-01 01:12:17 +02:00
def iter_set_downloading_columns ( self , model , iter , episode = None , downloading = None ) :
2008-11-17 21:35:25 +01:00
global ICON_AUDIO_FILE , ICON_VIDEO_FILE
2008-04-06 02:19:03 +02:00
global ICON_DOWNLOADING , ICON_DELETED , ICON_NEW
2008-10-02 20:40:46 +02:00
if episode is None :
url = model . get_value ( iter , 0 )
2009-03-10 14:59:01 +01:00
episode = db . load_episode ( url , factory = self . episode_factory )
2008-10-02 20:40:46 +02:00
else :
url = episode . url
2008-02-06 10:29:56 +01:00
2009-01-19 19:00:24 +01:00
if gl . config . episode_list_descriptions or gpodder . interface == gpodder . MAEMO :
2008-02-06 10:29:56 +01:00
icon_size = 32
else :
icon_size = 16
2007-08-20 15:45:46 +02:00
2009-04-01 01:12:17 +02:00
if downloading is not None and downloading ( episode ) :
2008-04-06 02:19:03 +02:00
status_icon = util . get_tree_icon ( ICON_DOWNLOADING , icon_cache = self . icon_cache , icon_size = icon_size )
2007-08-20 15:45:46 +02:00
else :
2008-06-30 03:10:18 +02:00
if episode . state == db . STATE_NORMAL :
if episode . is_played :
status_icon = None
else :
status_icon = util . get_tree_icon ( ICON_NEW , icon_cache = self . icon_cache , icon_size = icon_size )
2008-10-06 22:07:38 +02:00
elif episode . was_downloaded ( ) :
2008-06-30 03:10:18 +02:00
missing = not episode . file_exists ( )
if missing :
log ( ' Episode missing: %s (before drawing an icon) ' , episode . url , sender = self )
2008-07-03 01:36:39 +02:00
file_type = util . file_type_by_extension ( model . get_value ( iter , 9 ) )
2008-06-30 03:10:18 +02:00
if file_type == ' audio ' :
status_icon = util . get_tree_icon ( ICON_AUDIO_FILE , not episode . is_played , episode . is_locked , not episode . file_exists ( ) , self . icon_cache , icon_size )
elif file_type == ' video ' :
status_icon = util . get_tree_icon ( ICON_VIDEO_FILE , not episode . is_played , episode . is_locked , not episode . file_exists ( ) , self . icon_cache , icon_size )
else :
status_icon = util . get_tree_icon ( ' unknown ' , not episode . is_played , episode . is_locked , not episode . file_exists ( ) , self . icon_cache , icon_size )
elif episode . state == db . STATE_DELETED or episode . state == db . STATE_DOWNLOADED :
2008-11-19 16:25:27 +01:00
status_icon = util . get_tree_icon ( ICON_DELETED , not episode . is_played , icon_cache = self . icon_cache , icon_size = icon_size )
2008-06-30 03:10:18 +02:00
else :
log ( ' Warning: Cannot determine status icon. ' , sender = self )
status_icon = None
2007-08-20 15:45:46 +02:00
model . set ( iter , 4 , status_icon )
2009-04-01 01:12:17 +02:00
def get_tree_model ( self , downloading = None ) :
2007-08-24 16:49:41 +02:00
"""
Return a gtk . ListStore containing episodes for this channel
2006-12-06 21:25:26 +01:00
"""
2008-07-03 01:36:39 +02:00
new_model = gtk . ListStore ( gobject . TYPE_STRING , gobject . TYPE_STRING , gobject . TYPE_STRING ,
gobject . TYPE_BOOLEAN , gtk . gdk . Pixbuf , gobject . TYPE_STRING , gobject . TYPE_STRING ,
gobject . TYPE_STRING , gobject . TYPE_STRING , gobject . TYPE_STRING )
2007-04-03 08:27:46 +02:00
2008-12-13 13:29:45 +01:00
log ( ' Returning TreeModel for %s ' , self . url , sender = self )
urls = [ ]
2006-12-09 01:41:58 +01:00
for item in self . get_all_episodes ( ) :
2008-08-04 15:34:29 +02:00
description = item . title_and_description
2008-06-30 03:10:18 +02:00
2008-12-06 19:15:11 +01:00
if item . length > 0 :
2008-06-30 03:10:18 +02:00
filelength = gl . format_filesize ( item . length , 1 )
else :
filelength = None
2008-07-03 01:36:39 +02:00
new_iter = new_model . append ( ( item . url , item . title , filelength ,
2008-08-03 21:09:03 +02:00
True , None , item . cute_pubdate ( ) , description , util . remove_html_tags ( item . description ) ,
2009-02-09 23:26:47 +01:00
' XXXXXXXXXXXXXUNUSEDXXXXXXXXXXXXXXXXXXX ' , item . extension ( ) ) )
2009-04-01 01:12:17 +02:00
self . iter_set_downloading_columns ( new_model , new_iter , episode = item , downloading = downloading )
2008-12-13 13:29:45 +01:00
urls . append ( item . url )
2005-11-21 19:21:25 +01:00
2007-11-14 21:57:31 +01:00
self . update_save_dir_size ( )
2008-12-13 13:29:45 +01:00
return ( new_model , urls )
2005-11-21 19:21:25 +01:00
2006-12-09 01:41:58 +01:00
def find_episode ( self , url ) :
2009-03-10 14:59:01 +01:00
return db . load_episode ( url , factory = self . episode_factory )
2005-11-21 19:21:25 +01:00
2009-02-06 15:54:28 +01:00
@classmethod
def find_unique_folder_name ( cls , foldername ) :
current_try = util . sanitize_filename ( foldername , cls . MAX_FOLDERNAME_LENGTH )
next_try_id = 2
2009-02-16 10:33:35 +01:00
while db . channel_foldername_exists ( current_try ) :
2009-02-06 15:54:28 +01:00
current_try = ' %s ( %d ) ' % ( foldername , next_try_id )
next_try_id + = 1
return current_try
2006-03-03 21:04:25 +01:00
def get_save_dir ( self ) :
2009-02-06 15:54:28 +01:00
urldigest = hashlib . md5 ( self . url ) . hexdigest ( )
sanitizedurl = util . sanitize_filename ( self . url , self . MAX_FOLDERNAME_LENGTH )
2009-02-18 13:41:35 +01:00
if self . foldername is None or ( self . auto_foldername and ( self . foldername == urldigest or self . foldername . startswith ( sanitizedurl ) ) ) :
2009-02-06 15:54:28 +01:00
# we must change the folder name, because it has not been set manually
fn_template = util . sanitize_filename ( self . title , self . MAX_FOLDERNAME_LENGTH )
# if this is an empty string, try the basename
if len ( fn_template ) == 0 :
log ( ' That is one ugly feed you have here! (Report this to bugs.gpodder.org: %s ) ' , self . url , sender = self )
fn_template = util . sanitize_filename ( os . path . basename ( self . url ) , self . MAX_FOLDERNAME_LENGTH )
# If the basename is also empty, use the first 6 md5 hexdigest chars of the URL
if len ( fn_template ) == 0 :
log ( ' That is one REALLY ugly feed you have here! (Report this to bugs.gpodder.org: %s ) ' , self . url , sender = self )
fn_template = urldigest # no need for sanitize_filename here
# Find a unique folder name for this podcast
wanted_foldername = self . find_unique_folder_name ( fn_template )
# if the foldername has not been set, check if the (old) md5 filename exists
if self . foldername is None and os . path . exists ( os . path . join ( gl . downloaddir , urldigest ) ) :
2009-02-09 23:26:47 +01:00
log ( ' Found pre-0.15.0 download folder for %s : %s ' , self . title , urldigest , sender = self )
2009-02-06 15:54:28 +01:00
self . foldername = urldigest
# we have a valid, new folder name in "current_try" -> use that!
if self . foldername is not None and wanted_foldername != self . foldername :
# there might be an old download folder crawling around - move it!
new_folder_name = os . path . join ( gl . downloaddir , wanted_foldername )
old_folder_name = os . path . join ( gl . downloaddir , self . foldername )
if os . path . exists ( old_folder_name ) :
if not os . path . exists ( new_folder_name ) :
# Old folder exists, new folder does not -> simply rename
log ( ' Renaming %s => %s ' , old_folder_name , new_folder_name , sender = self )
os . rename ( old_folder_name , new_folder_name )
else :
# Both folders exist -> move files and delete old folder
log ( ' Moving files from %s to %s ' , old_folder_name , new_folder_name , sender = self )
for file in glob . glob ( os . path . join ( old_folder_name , ' * ' ) ) :
shutil . move ( file , new_folder_name )
log ( ' Removing %s ' , old_folder_name , sender = self )
shutil . rmtree ( old_folder_name , ignore_errors = True )
log ( ' Updating foldername of %s to " %s " . ' , self . url , wanted_foldername , sender = self )
self . foldername = wanted_foldername
self . save ( )
save_dir = os . path . join ( gl . downloaddir , self . foldername )
2005-11-21 19:21:25 +01:00
2006-12-06 21:25:26 +01:00
# Create save_dir if it does not yet exist
2007-08-07 20:11:31 +02:00
if not util . make_directory ( save_dir ) :
2007-08-22 01:00:49 +02:00
log ( ' Could not create save_dir: %s ' , save_dir , sender = self )
2006-04-14 14:56:16 +02:00
2006-12-06 21:25:26 +01:00
return save_dir
save_dir = property ( fget = get_save_dir )
2006-03-03 21:04:25 +01:00
2006-12-06 21:25:26 +01:00
def remove_downloaded ( self ) :
shutil . rmtree ( self . save_dir , True )
2006-03-03 21:04:25 +01:00
def get_index_file ( self ) :
# gets index xml filename for downloaded channels list
2006-12-06 21:25:26 +01:00
return os . path . join ( self . save_dir , ' index.xml ' )
2005-11-21 19:21:25 +01:00
2006-03-03 21:04:25 +01:00
index_file = property ( fget = get_index_file )
2006-03-29 14:41:34 +02:00
2006-03-31 18:20:18 +02:00
def get_cover_file ( self ) :
# gets cover filename for cover download cache
2006-12-06 21:25:26 +01:00
return os . path . join ( self . save_dir , ' cover ' )
2006-03-31 18:20:18 +02:00
cover_file = property ( fget = get_cover_file )
2007-04-03 08:27:46 +02:00
2006-12-06 21:25:26 +01:00
def delete_episode_by_url ( self , url ) :
2009-03-10 14:59:01 +01:00
episode = db . load_episode ( url , factory = self . episode_factory )
2008-06-30 03:10:18 +02:00
if episode is not None :
2009-02-09 23:26:47 +01:00
filename = episode . local_filename ( create = False )
if filename is not None :
util . delete_file ( filename )
else :
log ( ' Cannot delete episode: %s (I have no filename!) ' , episode . title , sender = self )
2008-06-30 03:10:18 +02:00
episode . set_state ( db . STATE_DELETED )
2006-12-06 21:25:26 +01:00
2008-06-30 03:10:18 +02:00
self . update_m3u_playlist ( )
2007-03-14 20:35:15 +01:00
2006-03-24 20:08:59 +01:00
2009-03-10 14:59:01 +01:00
class PodcastEpisode ( PodcastModelObject ) :
2006-03-03 21:04:25 +01:00
""" holds data for one object in a channel """
2009-02-09 23:26:47 +01:00
MAX_FILENAME_LENGTH = 200
2007-08-20 15:45:46 +02:00
2009-03-10 14:59:01 +01:00
def reload_from_db ( self ) :
"""
Re - reads all episode details for this object from the
database and updates this object accordingly . Can be
used to refresh existing objects when the database has
been updated ( e . g . the filename has been set after a
download where it was not set before the download )
"""
d = db . load_episode ( self . url )
2008-06-30 03:10:18 +02:00
if d is not None :
2009-03-10 14:59:01 +01:00
self . update_from_dict ( d )
return self
2008-06-30 03:10:18 +02:00
2007-08-20 15:45:46 +02:00
@staticmethod
2007-08-22 01:00:49 +02:00
def from_feedparser_entry ( entry , channel ) :
2009-03-10 14:59:01 +01:00
episode = PodcastEpisode ( channel )
2007-08-20 15:45:46 +02:00
2007-08-26 20:21:23 +02:00
episode . title = entry . get ( ' title ' , util . get_first_line ( util . remove_html_tags ( entry . get ( ' summary ' , ' ' ) ) ) )
episode . link = entry . get ( ' link ' , ' ' )
2009-03-30 20:42:33 +02:00
episode . description = ' '
# Get the episode description (prefer summary, then subtitle)
for key in ( ' summary ' , ' subtitle ' , ' link ' ) :
if key in entry :
episode . description = entry [ key ]
if episode . description :
break
2007-08-26 23:56:06 +02:00
episode . guid = entry . get ( ' id ' , ' ' )
2007-08-30 20:49:53 +02:00
if entry . get ( ' updated_parsed ' , None ) :
2008-07-14 18:46:59 +02:00
episode . pubDate = rfc822 . mktime_tz ( entry . updated_parsed + ( 0 , ) )
2007-08-20 15:45:46 +02:00
2007-08-26 20:21:23 +02:00
if episode . title == ' ' :
log ( ' Warning: Episode has no title, adding anyways.. (Feed Is Buggy!) ' , sender = episode )
2008-04-11 10:13:17 +02:00
enclosure = None
if hasattr ( entry , ' enclosures ' ) and len ( entry . enclosures ) > 0 :
enclosure = entry . enclosures [ 0 ]
if len ( entry . enclosures ) > 1 :
for e in entry . enclosures :
if hasattr ( e , ' href ' ) and hasattr ( e , ' length ' ) and hasattr ( e , ' type ' ) and ( e . type . startswith ( ' audio/ ' ) or e . type . startswith ( ' video/ ' ) ) :
2008-04-22 21:16:30 +02:00
if util . normalize_feed_url ( e . href ) is not None :
2008-04-11 10:13:17 +02:00
log ( ' Selected enclosure: %s ' , e . href , sender = episode )
enclosure = e
break
episode . url = util . normalize_feed_url ( enclosure . get ( ' href ' , ' ' ) )
elif hasattr ( entry , ' link ' ) :
2008-07-03 01:36:39 +02:00
( filename , extension ) = util . filename_from_url ( entry . link )
if extension == ' ' and hasattr ( entry , ' type ' ) :
extension = util . extension_from_mimetype ( e . type )
2008-04-11 10:13:17 +02:00
file_type = util . file_type_by_extension ( extension )
if file_type is not None :
log ( ' Adding episode with link to file type " %s " . ' , file_type , sender = episode )
episode . url = entry . link
2007-09-02 14:27:38 +02:00
2009-01-26 12:37:21 +01:00
# YouTube specific
if not episode . url and hasattr ( entry , ' links ' ) and len ( entry . links ) and hasattr ( entry . links [ 0 ] , ' href ' ) :
episode . url = entry . links [ 0 ] . href
2007-09-02 14:27:38 +02:00
if not episode . url :
2009-01-26 12:37:21 +01:00
log ( ' Episode has no URL ' )
log ( ' Episode: %s ' , episode )
log ( ' Entry: %s ' , entry )
2008-06-14 15:57:34 +02:00
# This item in the feed has no downloadable enclosure
return None
2007-08-20 15:45:46 +02:00
2009-03-09 20:16:16 +01:00
metainfo = None
2008-03-02 13:56:16 +01:00
if not episode . pubDate :
2008-06-30 03:10:18 +02:00
metainfo = util . get_episode_info_from_url ( episode . url )
2008-03-02 13:56:16 +01:00
if ' pubdate ' in metainfo :
2008-06-17 14:50:27 +02:00
try :
2008-06-30 03:10:18 +02:00
episode . pubDate = int ( float ( metainfo [ ' pubdate ' ] ) )
2008-06-17 14:50:27 +02:00
except :
log ( ' Cannot convert pubDate " %s " in from_feedparser_entry. ' , str ( metainfo [ ' pubdate ' ] ) , traceback = True )
2008-03-02 13:56:16 +01:00
2008-12-06 19:15:11 +01:00
if hasattr ( enclosure , ' length ' ) :
2008-03-02 13:56:16 +01:00
try :
episode . length = int ( enclosure . length )
2009-03-11 11:05:21 +01:00
if episode . length == 0 :
raise ValueError ( ' Zero-length is not acceptable ' )
except ValueError , ve :
log ( ' Invalid episode length: %s ( %s ) ' , enclosure . length , ve . message )
2009-03-11 03:51:32 +01:00
episode . length = - 1
# If the configuration option is set, retrieve the length via a HTTP HEAD request
if gl . config . get_length_from_http_header_if_empty :
2009-03-11 11:05:21 +01:00
if enclosure . length == ' ' or episode . length == 0 :
2009-03-11 03:51:32 +01:00
if metainfo is None :
metainfo = util . get_episode_info_from_url ( episode . url )
if ' length ' in metainfo :
try :
episode . length = int ( float ( metainfo [ ' length ' ] ) )
except :
log ( ' Cannot convert lenght " %s " in from_feedparser_entry. ' , str ( metainfo [ ' length ' ] ) , traceback = True )
2008-03-02 13:56:16 +01:00
2007-08-31 23:40:15 +02:00
if hasattr ( enclosure , ' type ' ) :
episode . mimetype = enclosure . type
2007-08-20 15:45:46 +02:00
2007-08-27 00:04:50 +02:00
if episode . title == ' ' :
( filename , extension ) = os . path . splitext ( os . path . basename ( episode . url ) )
episode . title = filename
2007-08-20 15:45:46 +02:00
return episode
2007-08-22 01:00:49 +02:00
    def __init__(self, channel):
        """Create a new, empty episode belonging to *channel*."""
        # Used by Storage for faster saving
        self.id = None
        # Enclosure URL; also used as the key when updating the database
        self.url = ''
        self.title = ''
        # Enclosure size in bytes (0 = unknown)
        self.length = 0
        # Enclosure mimetype; generic default until the feed provides one
        self.mimetype = 'application/octet-stream'
        self.guid = ''
        self.description = ''
        self.link = ''
        # Back-reference to the owning channel object
        self.channel = channel
        # Publication date as a Unix timestamp (0 = unknown)
        self.pubDate = 0
        # Local filename (None until generated by local_filename())
        self.filename = None
        self.auto_filename = 1 # automatically generated filename
        self.state = db.STATE_NORMAL
        self.is_played = False
        # New episodes inherit the channel's lock setting
        self.is_locked = channel.channel_is_locked
2008-06-30 03:10:18 +02:00
def save ( self , bulk = False ) :
if self . state != db . STATE_DOWNLOADED and self . file_exists ( ) :
self . state = db . STATE_DOWNLOADED
db . save_episode ( self , bulk = bulk )
def set_state ( self , state ) :
self . state = state
db . mark_episode ( self . url , state = self . state , is_played = self . is_played , is_locked = self . is_locked )
def mark ( self , state = None , is_played = None , is_locked = None ) :
if state is not None :
self . state = state
if is_played is not None :
self . is_played = is_played
if is_locked is not None :
self . is_locked = is_locked
db . mark_episode ( self . url , state = state , is_played = is_played , is_locked = is_locked )
2008-03-02 13:56:16 +01:00
2008-10-13 15:26:27 +02:00
def mark_downloaded ( self , save = False ) :
self . state = db . STATE_DOWNLOADED
self . is_played = False
if save :
self . save ( )
2009-01-14 21:11:27 +01:00
db . commit ( )
2008-10-13 15:26:27 +02:00
2008-08-04 15:34:29 +02:00
@property
def title_and_description ( self ) :
"""
Returns Pango markup for displaying in a TreeView , and
disables the description when the config variable
" episode_list_descriptions " is not set .
"""
2009-01-19 19:00:24 +01:00
if gl . config . episode_list_descriptions and gpodder . interface != gpodder . MAEMO :
2008-08-04 15:34:29 +02:00
return ' %s \n <small> %s </small> ' % ( saxutils . escape ( self . title ) , saxutils . escape ( self . one_line_description ( ) ) )
else :
return saxutils . escape ( self . title )
2007-12-10 09:41:17 +01:00
def age_in_days ( self ) :
2009-02-09 23:26:47 +01:00
return util . file_age_in_days ( self . local_filename ( create = False ) )
2007-12-10 09:41:17 +01:00
def is_old ( self ) :
return self . age_in_days ( ) > gl . config . episode_old_age
def get_age_string ( self ) :
2008-01-28 12:38:53 +01:00
return util . file_age_to_string ( self . age_in_days ( ) )
2007-12-10 09:41:17 +01:00
age_prop = property ( fget = get_age_string )
2006-11-20 12:51:20 +01:00
def one_line_description ( self ) :
2008-08-03 21:09:03 +02:00
lines = util . remove_html_tags ( self . description ) . strip ( ) . splitlines ( )
2006-11-20 12:51:20 +01:00
if not lines or lines [ 0 ] == ' ' :
return _ ( ' No description available ' )
else :
2008-12-14 17:38:35 +01:00
return ' ' . join ( lines )
2006-12-06 21:25:26 +01:00
2007-12-18 10:18:33 +01:00
def delete_from_disk ( self ) :
try :
self . channel . delete_episode_by_url ( self . url )
except :
2008-04-22 22:24:19 +02:00
log ( ' Cannot delete episode from disk: %s ' , self . title , traceback = True , sender = self )
2007-11-08 20:11:57 +01:00
2009-02-09 23:26:47 +01:00
@classmethod
def find_unique_file_name ( cls , url , filename , extension ) :
current_try = util . sanitize_filename ( filename , cls . MAX_FILENAME_LENGTH ) + extension
next_try_id = 2
lookup_url = None
while db . episode_filename_exists ( current_try ) :
if next_try_id == 2 :
# If we arrive here, current_try has a collision, so
# try to resolve the URL for a better basename
log ( ' Filename collision: %s - trying to resolve... ' , current_try )
url = util . get_real_url ( url )
( episode_filename , extension_UNUSED ) = util . filename_from_url ( url )
2009-02-25 14:12:48 +01:00
current_try = util . sanitize_filename ( episode_filename , cls . MAX_FILENAME_LENGTH ) + extension
2009-02-09 23:26:47 +01:00
if not db . episode_filename_exists ( current_try ) :
log ( ' Filename %s is available - collision resolved. ' , current_try )
return current_try
else :
log ( ' Continuing search with %s as basename... ' , current_try )
current_try = ' %s ( %d ) %s ' % ( filename , next_try_id , extension )
next_try_id + = 1
return current_try
2009-02-14 13:31:27 +01:00
    def local_filename(self, create, force_update=False, check_only=False):
        """Get (and possibly generate) the local saving filename

        Pass create=True if you want this function to generate a
        new filename if none exists. You only want to do this when
        planning to create/download the file after calling this function.

        Normally, you should pass create=False. This will only
        create a filename when the file already exists from a previous
        version of gPodder (where we used md5 filenames). If the file
        does not exist (and the filename also does not exist), this
        function will return None.

        If you pass force_update=True to this function, it will try to
        find a new (better) filename and move the current file if this
        is the case. This is useful if (during the download) you get
        more information about the file, e.g. the mimetype and you want
        to include this information in the file name generation process.

        If check_only=True is passed to this function, it will never try
        to rename the file, even if would be a good idea. Use this if you
        only want to check if a file exists.

        The generated filename is stored in the database for future access.
        """
        ext = self.extension()

        # For compatibility with already-downloaded episodes, we
        # have to know md5 filenames if they are downloaded already
        urldigest = hashlib.md5(self.url).hexdigest()
        if not create and self.filename is None:
            urldigest_filename = os.path.join(self.channel.save_dir, urldigest+ext)
            if os.path.exists(urldigest_filename):
                # The file exists, so set it up in our database
                log('Recovering pre-0.15.0 file: %s', urldigest_filename, sender=self)
                self.filename = urldigest+ext
                self.auto_filename = 1
                self.save()
                return urldigest_filename
            # No filename stored and no legacy md5-named file on disk
            return None

        # We only want to check if the file exists, so don't try to
        # rename the file, even if it would be reasonable. See also:
        # http://bugs.gpodder.org/attachment.cgi?id=236
        if check_only:
            if self.filename is None:
                return None
            else:
                return os.path.join(self.channel.save_dir, self.filename)

        if self.filename is None or force_update or (self.auto_filename and self.filename == urldigest+ext):
            # Try to find a new filename for the current file
            (episode_filename, extension_UNUSED) = util.filename_from_url(self.url)
            fn_template = util.sanitize_filename(episode_filename, self.MAX_FILENAME_LENGTH)

            if 'redirect' in fn_template:
                # This looks like a redirection URL - force URL resolving!
                log('Looks like a redirection to me: %s', self.url, sender=self)
                url = util.get_real_url(self.url)
                log('Redirection resolved to: %s', url, sender=self)
                (episode_filename, extension_UNUSED) = util.filename_from_url(url)
                fn_template = util.sanitize_filename(episode_filename, self.MAX_FILENAME_LENGTH)

            # If the basename is empty, use the md5 hexdigest of the URL
            if len(fn_template) == 0 or fn_template.startswith('redirect.'):
                log('Report to bugs.gpodder.org: Podcast at %s with episode URL: %s', self.channel.url, self.url, sender=self)
                fn_template = urldigest

            # Find a unique filename for this episode
            wanted_filename = self.find_unique_file_name(self.url, fn_template, ext)

            # We populate the filename field the first time - does the old file still exist?
            if self.filename is None and os.path.exists(os.path.join(self.channel.save_dir, urldigest+ext)):
                log('Found pre-0.15.0 downloaded file: %s', urldigest, sender=self)
                self.filename = urldigest+ext

            # The old file exists, but we have decided to want a different filename
            if self.filename is not None and wanted_filename != self.filename:
                # there might be an old download folder crawling around - move it!
                new_file_name = os.path.join(self.channel.save_dir, wanted_filename)
                old_file_name = os.path.join(self.channel.save_dir, self.filename)
                if os.path.exists(old_file_name) and not os.path.exists(new_file_name):
                    log('Renaming %s => %s', old_file_name, new_file_name, sender=self)
                    os.rename(old_file_name, new_file_name)
                elif force_update and not os.path.exists(old_file_name):
                    # When we call force_update, the file might not yet exist when we
                    # call it from the downloading code before saving the file
                    log('Choosing new filename: %s', new_file_name, sender=self)
                else:
                    log('Warning: %s exists or %s does not.', new_file_name, old_file_name, sender=self)
                log('Updating filename of %s to "%s".', self.url, wanted_filename, sender=self)
            # Remember the chosen filename in the database
            self.filename = wanted_filename
            self.save()

        return os.path.join(self.channel.save_dir, self.filename)
2007-08-22 01:00:49 +02:00
2008-07-03 01:36:39 +02:00
def extension ( self ) :
( filename , ext ) = util . filename_from_url ( self . url )
# if we can't detect the extension from the url fallback on the mimetype
if ext == ' ' or util . file_type_by_extension ( ext ) is None :
ext = util . extension_from_mimetype ( self . mimetype )
2008-07-09 03:19:14 +02:00
#log('Getting extension from mimetype for: %s (mimetype: %s)' % (self.title, ext), sender=self)
2008-07-03 01:36:39 +02:00
return ext
2008-06-30 03:10:18 +02:00
def mark_new ( self ) :
self . state = db . STATE_NORMAL
self . is_played = False
db . mark_episode ( self . url , state = self . state , is_played = self . is_played )
def mark_old ( self ) :
self . is_played = True
db . mark_episode ( self . url , is_played = True )
def file_exists ( self ) :
2009-02-14 13:31:27 +01:00
filename = self . local_filename ( create = False , check_only = True )
2009-02-09 23:26:47 +01:00
if filename is None :
return False
else :
return os . path . exists ( filename )
2008-06-30 03:10:18 +02:00
def was_downloaded ( self , and_exists = False ) :
if self . state != db . STATE_DOWNLOADED :
return False
if and_exists and not self . file_exists ( ) :
return False
return True
2007-10-23 09:29:19 +02:00
def sync_filename ( self ) :
2008-03-02 14:22:29 +01:00
if gl . config . custom_sync_name_enabled :
2008-11-19 17:44:52 +01:00
if ' { channel ' in gl . config . custom_sync_name :
log ( ' Fixing OLD syntax { channel.*} => { podcast.*} in custom_sync_name. ' , sender = self )
gl . config . custom_sync_name = gl . config . custom_sync_name . replace ( ' { channel. ' , ' { podcast. ' )
return util . object_string_formatter ( gl . config . custom_sync_name , episode = self , podcast = self . channel )
2007-10-23 09:29:19 +02:00
else :
return self . title
2007-08-22 01:00:49 +02:00
def file_type ( self ) :
2008-07-03 01:36:39 +02:00
return util . file_type_by_extension ( self . extension ( ) )
2007-09-08 16:49:54 +02:00
@property
def basename ( self ) :
return os . path . splitext ( os . path . basename ( self . url ) ) [ 0 ]
@property
def published ( self ) :
2008-12-24 11:54:21 +01:00
"""
Returns published date as YYYYMMDD ( or 00000000 if not available )
"""
2007-09-08 16:49:54 +02:00
try :
2008-06-14 18:53:16 +02:00
return datetime . datetime . fromtimestamp ( self . pubDate ) . strftime ( ' % Y % m %d ' )
2007-09-08 16:49:54 +02:00
except :
log ( ' Cannot format pubDate for " %s " . ' , self . title , sender = self )
return ' 00000000 '
2008-12-24 11:54:21 +01:00
@property
def pubtime ( self ) :
"""
Returns published time as HHMM ( or 0000 if not available )
"""
try :
return datetime . datetime . fromtimestamp ( self . pubDate ) . strftime ( ' % H % M ' )
except :
log ( ' Cannot format pubDate (time) for " %s " . ' , self . title , sender = self )
return ' 0000 '
2007-08-22 01:00:49 +02:00
2008-06-30 03:10:18 +02:00
def cute_pubdate ( self ) :
2008-06-14 18:53:16 +02:00
result = util . format_date ( self . pubDate )
2008-04-19 19:01:09 +02:00
if result is None :
return ' ( %s ) ' % _ ( ' unknown ' )
else :
return result
2007-11-08 20:11:57 +01:00
pubdate_prop = property ( fget = cute_pubdate )
2006-12-09 01:41:58 +01:00
2007-08-22 01:00:49 +02:00
def calculate_filesize ( self ) :
2009-02-09 23:26:47 +01:00
filename = self . local_filename ( create = False )
if filename is None :
log ( ' calculate_filesized called, but filename is None! ' , sender = self )
2006-12-09 01:41:58 +01:00
try :
2009-02-09 23:26:47 +01:00
self . length = os . path . getsize ( filename )
2006-12-09 01:41:58 +01:00
except :
log ( ' Could not get filesize for %s . ' , self . url )
2007-11-08 20:11:57 +01:00
def get_filesize_string ( self ) :
2007-11-09 10:09:05 +01:00
return gl . format_filesize ( self . length )
2007-11-08 20:11:57 +01:00
filesize_prop = property ( fget = get_filesize_string )
def get_channel_title ( self ) :
return self . channel . title
channel_prop = property ( fget = get_channel_title )
def get_played_string ( self ) :
2008-06-30 03:10:18 +02:00
if not self . is_played :
2007-11-08 20:11:57 +01:00
return _ ( ' Unplayed ' )
return ' '
played_prop = property ( fget = get_played_string )
2006-04-10 18:46:50 +02:00
2006-08-02 20:24:48 +02:00
2006-06-13 23:00:31 +02:00
2008-09-06 22:34:35 +02:00
def update_channel_model_by_iter(model, iter, channel, color_dict,
        cover_cache=None, max_width=0, max_height=0, initialize_all=False):
    """Refresh one row of the channel-list model from a channel object.

    Columns written (see channels_to_model for the model layout):
    0=url (only with initialize_all), 1=title, 2=markup description,
    3=pill pixbuf, 5=cover pixbuf (only with initialize_all),
    6=parse error text, 7=pill visibility, 8=foreground color.
    color_dict must provide 'default', 'parse_error' and 'updating' keys.
    """
    count_downloaded = channel.stat(state=db.STATE_DOWNLOADED)
    count_new = channel.stat(state=db.STATE_NORMAL, is_played=False)
    count_unplayed = channel.stat(state=db.STATE_DOWNLOADED, is_played=False)

    # Remember the row so the channel can be updated in-place later
    channel.iter = iter
    if initialize_all:
        model.set(iter, 0, channel.url)

    model.set(iter, 1, channel.title)
    title_markup = saxutils.escape(channel.title)
    description_markup = saxutils.escape(util.get_first_line(channel.description) or _('No description available'))
    d = []
    if count_new:
        # Bold title when there are new episodes
        d.append('<span weight="bold">')
    d.append(title_markup)
    if count_new:
        d.append('</span>')

    description = ''.join(d+['\n', '<small>', description_markup, '</small>'])
    model.set(iter, 2, description)

    if channel.parse_error is not None:
        model.set(iter, 6, channel.parse_error)
        color = color_dict['parse_error']
    else:
        color = color_dict['default']

    # 'updating' overrides the error/default color
    if channel.update_flag:
        color = color_dict['updating']

    model.set(iter, 8, color)

    if count_unplayed > 0 or count_downloaded > 0:
        model.set(iter, 3, draw.draw_pill_pixbuf(str(count_unplayed), str(count_downloaded)))
        model.set(iter, 7, True)
    else:
        model.set(iter, 7, False)

    if initialize_all:
        # Load the cover if we have it, but don't download
        # it if it's not available (to avoid blocking here)
        pixbuf = services.cover_downloader.get_cover(channel, avoid_downloading=True)
        new_pixbuf = None
        if pixbuf is not None:
            new_pixbuf = util.resize_pixbuf_keep_ratio(pixbuf, max_width, max_height, channel.url, cover_cache)
        model.set(iter, 5, new_pixbuf or pixbuf)
2007-04-03 13:21:12 +02:00
2008-08-30 19:23:04 +02:00
def channels_to_model(channels, color_dict, cover_cache=None, max_width=0, max_height=0):
    """Build a fresh ListStore for the channel list.

    Returns (model, urls) where urls[i] is the URL of the channel shown
    in row i of the model.
    """
    new_model = gtk.ListStore(str, str, str, gtk.gdk.Pixbuf, int,
                              gtk.gdk.Pixbuf, str, bool, str)

    urls = []
    for channel in channels:
        row = new_model.append()
        update_channel_model_by_iter(new_model, row, channel,
                                     color_dict, cover_cache,
                                     max_width, max_height, True)
        urls.append(channel.url)

    return (new_model, urls)
2005-11-21 19:21:25 +01:00
2007-08-20 15:45:46 +02:00
2008-06-30 03:10:18 +02:00
def load_channels():
    """Load all subscribed channels from the database."""
    factory = PodcastChannel.create_from_dict
    return db.load_channels(factory=factory)
2007-08-20 15:45:46 +02:00
2008-06-30 03:10:18 +02:00
def update_channels(callback_proc=None, callback_error=None, is_cancelled_cb=None):
    """Update the feeds of all subscribed channels.

    callback_proc(position, total), when given, is invoked before each
    channel is updated; is_cancelled_cb(), when given, is polled before
    each channel and aborts the run early when it returns True.
    Returns the list of channels in both cases.
    """
    log('Updating channels....')

    channels = load_channels()
    total = len(channels)  # hoisted: was recomputed on every iteration

    for position, channel in enumerate(channels):
        if is_cancelled_cb is not None and is_cancelled_cb():
            return channels
        if callback_proc is not None:
            callback_proc(position, total)
        channel.update()

    return channels
2007-08-20 15:45:46 +02:00
def save_channels(channels):
    """Export the channel list to the subscriptions OPML file.

    Returns the exporter's success value.
    """
    return opml.Exporter(gl.channel_opml_file).write(channels)
2007-08-20 15:45:46 +02:00
2008-06-30 03:10:18 +02:00
def can_restore_from_opml():
    """Return the OPML subscriptions file path if it contains at least
    one item, or None when it is missing, unreadable or empty."""
    try:
        if len(opml.Importer(gl.channel_opml_file).items):
            return gl.channel_opml_file
    except Exception:
        # Was a bare "except:" (would also swallow SystemExit, etc.)
        pass
    return None
2007-08-20 15:45:46 +02:00
2007-09-15 16:29:37 +02:00
class LocalDBReader(object):
    """
    DEPRECATED - Only used for migration to SQLite

    Parses a pre-SQLite "LocalDB" XML file (an RSS-like document) and
    turns it into PodcastChannel/PodcastEpisode objects.
    """
    def __init__(self, url):
        # URL of the channel this LocalDB file belongs to
        self.url = url

    def get_text(self, nodelist):
        """Concatenate the data of all text nodes in *nodelist*."""
        return ''.join([node.data for node in nodelist if node.nodeType == node.TEXT_NODE])

    def get_text_by_first_node(self, element, name):
        """Text content of the first child element called *name*.

        Raises IndexError when no such element exists.
        """
        return self.get_text(element.getElementsByTagName(name)[0].childNodes)

    def get_episode_from_element(self, channel, element):
        """Build a PodcastEpisode from an <item> DOM element."""
        episode = PodcastEpisode(channel)

        episode.title = self.get_text_by_first_node(element, 'title')
        episode.description = self.get_text_by_first_node(element, 'description')
        episode.url = self.get_text_by_first_node(element, 'url')
        episode.link = self.get_text_by_first_node(element, 'link')
        episode.guid = self.get_text_by_first_node(element, 'guid')

        if not episode.guid:
            # Fall back to the URL (preferred) or link as the guid
            for k in ('url', 'link'):
                if getattr(episode, k) is not None:
                    episode.guid = getattr(episode, k)
                    log('Notice: episode has no guid, using %s', episode.guid)
                    break
        try:
            episode.pubDate = float(self.get_text_by_first_node(element, 'pubDate'))
        except:
            # Very old LocalDB files stored pubDate as a date string
            # instead of a timestamp - convert it via feedparser
            log('Looks like you have an old pubDate in your LocalDB -> converting it')
            episode.pubDate = self.get_text_by_first_node(element, 'pubDate')
            log('FYI: pubDate value is: "%s"', episode.pubDate, sender=self)
            pubdate = feedparser._parse_date(episode.pubDate)
            if pubdate is None:
                log('Error converting the old pubDate - sorry!', sender=self)
                episode.pubDate = 0
            else:
                log('PubDate converted successfully - yay!', sender=self)
                episode.pubDate = time.mktime(pubdate)
        try:
            episode.mimetype = self.get_text_by_first_node(element, 'mimetype')
        except:
            # Element missing -> keep the default mimetype
            log('No mimetype info for %s', episode.url, sender=self)
        episode.calculate_filesize()
        return episode

    def load_and_clean(self, filename):
        """
        Clean-up a LocalDB XML file that could potentially contain
        "unbound prefix" XML elements (generated by the old print-based
        LocalDB code). The code removes those lines to make the new
        DOM parser happy.

        This should be removed in a future version.
        """
        lines = []
        for line in open(filename).read().split('\n'):
            if not line.startswith('<gpodder:info'):
                lines.append(line)

        return '\n'.join(lines)

    def read(self, filename):
        """Parse *filename* and return the list of episodes it contains.

        The shared PodcastChannel (built from the <channel> element) is
        attached to each returned episode via episode.channel.
        """
        doc = xml.dom.minidom.parseString(self.load_and_clean(filename))
        rss = doc.getElementsByTagName('rss')[0]

        channel_element = rss.getElementsByTagName('channel')[0]

        channel = PodcastChannel(url=self.url)
        channel.title = self.get_text_by_first_node(channel_element, 'title')
        channel.description = self.get_text_by_first_node(channel_element, 'description')
        channel.link = self.get_text_by_first_node(channel_element, 'link')

        episodes = []
        for episode_element in rss.getElementsByTagName('item'):
            episode = self.get_episode_from_element(channel, episode_element)
            episodes.append(episode)

        return episodes
2007-09-15 16:29:37 +02:00