2007-08-29 20:30:26 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2005-11-21 19:21:25 +01:00
|
|
|
#
|
2007-08-29 20:30:26 +02:00
|
|
|
# gPodder - A media aggregator and podcast client
|
2018-01-28 19:39:53 +01:00
|
|
|
# Copyright (c) 2005-2018 The gPodder Team
|
2011-07-08 23:04:56 +02:00
|
|
|
# Copyright (c) 2011 Neal H. Walfield
|
2005-11-21 19:21:25 +01:00
|
|
|
#
|
2007-08-29 20:30:26 +02:00
|
|
|
# gPodder is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation; either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
2006-04-07 22:22:30 +02:00
|
|
|
#
|
2007-08-29 20:30:26 +02:00
|
|
|
# gPodder is distributed in the hope that it will be useful,
|
2006-04-07 22:22:30 +02:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2007-08-29 20:30:26 +02:00
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2006-04-07 22:22:30 +02:00
|
|
|
#
|
|
|
|
|
2005-11-21 19:21:25 +01:00
|
|
|
|
|
|
|
#
|
2009-08-13 23:36:18 +02:00
|
|
|
# gpodder.model - Core model classes for gPodder (2009-08-13)
|
|
|
|
# Based on libpodcasts.py (thp, 2005-10-29)
|
2005-11-21 19:21:25 +01:00
|
|
|
#
|
2007-08-07 20:11:31 +02:00
|
|
|
|
2008-04-22 21:57:02 +02:00
|
|
|
import gpodder
|
2007-08-07 20:11:31 +02:00
|
|
|
from gpodder import util
|
2009-06-12 00:51:13 +02:00
|
|
|
from gpodder import feedcore
|
2009-08-24 17:02:35 +02:00
|
|
|
from gpodder import youtube
|
2012-01-03 16:37:41 +01:00
|
|
|
from gpodder import vimeo
|
2014-09-19 17:57:30 +02:00
|
|
|
from gpodder import escapist_videos
|
2011-07-16 17:26:04 +02:00
|
|
|
from gpodder import schema
|
2012-03-05 10:44:19 +01:00
|
|
|
from gpodder import coverart
|
2007-09-15 16:29:37 +02:00
|
|
|
|
2011-07-15 16:32:06 +02:00
|
|
|
import logging
|
|
|
|
logger = logging.getLogger(__name__)
|
2006-02-04 11:37:23 +01:00
|
|
|
|
2006-12-06 21:25:26 +01:00
|
|
|
import os
|
2009-10-13 14:19:40 +02:00
|
|
|
import re
|
2006-12-06 21:25:26 +01:00
|
|
|
import glob
|
|
|
|
import shutil
|
2007-08-30 20:49:53 +02:00
|
|
|
import time
|
2008-04-22 21:57:02 +02:00
|
|
|
import datetime
|
2012-01-10 13:47:20 +01:00
|
|
|
|
2008-12-27 13:24:21 +01:00
|
|
|
import hashlib
|
2016-02-06 17:46:07 +01:00
|
|
|
import podcastparser
|
2011-07-16 14:30:08 +02:00
|
|
|
import collections
|
2011-08-07 23:39:46 +02:00
|
|
|
import string
|
2007-07-05 23:07:16 +02:00
|
|
|
|
2009-05-07 16:26:07 +02:00
|
|
|
_ = gpodder.gettext
|
2007-08-20 15:45:46 +02:00
|
|
|
|
2008-03-02 13:56:16 +01:00
|
|
|
|
2012-05-24 23:17:55 +02:00
|
|
|
# Signal-style exception: raised/used when a registered custom handler
# (see gPodderFetcher.custom_handlers) provides the feed content itself
class CustomFeed(feedcore.ExceptionWithData): pass
|
|
|
|
|
2009-06-12 00:51:13 +02:00
|
|
|
class gPodderFetcher(feedcore.Fetcher):
    """Feed fetcher used throughout gPodder.

    Extends the feedcore Fetcher with the gPodder User-Agent and the
    proxy handler based on the current settings in gPodder.
    """

    # Shared by all instances; populated via register()
    custom_handlers = []

    def fetch_channel(self, channel):
        """Fetch the feed for "channel" and return a feedcore.Result."""
        # If we have a username or password, rebuild the URL with them
        # included.  A HTTPBasicAuthHandler would require knowing the
        # realm up front, so rebuilding the URL is the simpler approach.
        url = channel.authenticate_url(channel.url)

        # Give every registered custom handler a chance to serve this URL
        for custom_handler in self.custom_handlers:
            handled_feed = custom_handler.handle_url(url)
            if handled_feed is not None:
                return feedcore.Result(feedcore.CUSTOM_FEED, handled_feed)

        return self.fetch(url, channel.http_etag, channel.http_last_modified)

    def _resolve_url(self, url):
        """Translate service-specific pages into their real feed URLs."""
        for service in (youtube, vimeo, escapist_videos):
            url = service.get_real_channel_url(url)
        return url

    @classmethod
    def register(cls, handler):
        """Add a custom URL handler (shared by all fetcher instances)."""
        cls.custom_handlers.append(handler)
|
|
|
|
|
2018-02-10 11:11:20 +01:00
|
|
|
|
2009-11-18 00:01:15 +01:00
|
|
|
# The "register" method is exposed here for external usage
# (module-level alias so callers don't need to import the class itself)
register_custom_handler = gPodderFetcher.register
|
2009-03-10 14:59:01 +01:00
|
|
|
|
2011-07-16 17:26:04 +02:00
|
|
|
# Our podcast model:
|
|
|
|
#
|
|
|
|
# database -> podcast -> episode -> download/playback
|
|
|
|
# podcast.parent == db
|
|
|
|
# podcast.children == [episode, ...]
|
|
|
|
# episode.parent == podcast
|
|
|
|
#
|
|
|
|
# - normally: episode.children = (None, None)
|
|
|
|
# - downloading: episode.children = (DownloadTask(), None)
|
|
|
|
# - playback: episode.children = (None, PlaybackTask())
|
|
|
|
|
|
|
|
|
2009-03-10 14:59:01 +01:00
|
|
|
class PodcastModelObject(object):
    """Generic base class for the podcast model.

    Provides common helper and utility functions shared by the
    model objects (podcasts, episodes, ...).
    """

    __slots__ = ('id', 'parent', 'children')

    @classmethod
    def create_from_dict(cls, d, *args):
        """Instantiate cls(*args), then copy every item of "d" onto it."""
        instance = cls(*args)

        # XXX: all(map(lambda k: hasattr(o, k), d))?
        for key, value in d.items():
            setattr(instance, key, value)

        return instance
|
2009-03-10 14:59:01 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
class PodcastEpisode(PodcastModelObject):
    """holds data for one object in a channel"""

    # In theory, Linux can have 255 bytes (not characters!) in a filename, but
    # filesystems like eCryptFS store metadata in the filename, making the
    # effective number of characters less than that. eCryptFS recommends
    # 140 chars, we use 120 here (140 - len(extension) - len(".partial")).
    # References: gPodder bug 1898, http://unix.stackexchange.com/a/32834
    MAX_FILENAME_LENGTH = 120

    # One slot per database column (keeps per-episode memory low)
    __slots__ = schema.EpisodeColumns

    def _deprecated(self):
        # Shared fget/fset used below so that any access to a removed
        # attribute fails loudly instead of silently doing nothing
        raise Exception('Property is deprecated!')

    # Legacy attributes from older gPodder versions; any access raises.
    is_played = property(fget=_deprecated, fset=_deprecated)
    is_locked = property(fget=_deprecated, fset=_deprecated)
|
2010-04-29 13:47:19 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def has_website_link(self):
    """Return True if this episode has a usable website link.

    A link counts when it differs from the media URL, or when it is a
    YouTube video page (where link == url is expected).
    """
    if not self.link:
        return False
    return self.link != self.url or youtube.is_video_link(self.link)
|
2010-08-27 14:32:15 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
@classmethod
def from_podcastparser_entry(cls, entry, channel):
    """Build a PodcastEpisode from a parsed podcastparser entry dict.

    Picks the first usable enclosure; falls back to the entry link if
    it looks like a video page or a direct media file. Returns None
    when no usable media source is found.
    """
    episode = cls(channel)
    episode.guid = entry['guid']
    episode.title = entry['title']
    episode.link = entry['link']
    episode.description = entry['description']
    if entry.get('description_html'):
        episode.description_html = entry['description_html']
    # TODO: This really should be handled in podcastparser and not here.
    elif util.is_html(entry['description']):
        # Plain description actually contains HTML: keep the HTML copy
        # and strip tags for the plain-text field
        episode.description_html = entry['description']
        episode.description = util.remove_html_tags(entry['description'])

    episode.total_time = entry['total_time']
    episode.published = entry['published']
    episode.payment_url = entry['payment_url']

    # Pre-scan all enclosures so the loop below can skip non-media
    # types whenever a proper audio/video enclosure exists
    audio_available = any(enclosure['mime_type'].startswith('audio/') for enclosure in entry['enclosures'])
    video_available = any(enclosure['mime_type'].startswith('video/') for enclosure in entry['enclosures'])

    for enclosure in entry['enclosures']:
        episode.mime_type = enclosure['mime_type']

        # Skip images in feeds if audio or video is available (bug 979)
        # This must (and does) also look in Media RSS enclosures (bug 1430)
        if episode.mime_type.startswith('image/') and (audio_available or video_available):
            continue

        # If we have audio or video available later on, skip
        # 'application/octet-stream' data types (fixes Linux Outlaws)
        if episode.mime_type == 'application/octet-stream' and (audio_available or video_available):
            continue

        episode.url = util.normalize_feed_url(enclosure['url'])
        if not episode.url:
            continue

        # First usable enclosure wins
        episode.file_size = enclosure['file_size']
        return episode

    # Brute-force detection of the episode link
    episode.url = util.normalize_feed_url(entry['link'])
    if not episode.url:
        return None

    if any(mod.is_video_link(episode.url) for mod in (youtube, vimeo, escapist_videos)):
        return episode

    # Check if we can resolve this link to a audio/video file
    filename, extension = util.filename_from_url(episode.url)
    file_type = util.file_type_by_extension(extension)

    # The link points to a audio or video file - use it!
    if file_type is not None:
        return episode

    return None
|
2009-08-24 13:04:11 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def __init__(self, channel):
    """Create a new, empty episode belonging to "channel"."""
    self.parent = channel
    self.podcast_id = self.parent.id
    # children == (download_task, playback_task)
    self.children = (None, None)

    # Database id (None until first save)
    self.id = None
    self.url = ''
    self.title = ''
    self.file_size = 0
    self.mime_type = 'application/octet-stream'
    self.guid = ''
    self.description = ''
    self.description_html = ''
    self.link = ''
    # Publication time as a Unix timestamp
    self.published = 0
    self.download_filename = None
    self.payment_url = None

    self.state = gpodder.STATE_NORMAL
    self.is_new = True
    # "archive" == protect the download from automatic cleanup;
    # inherits the channel's default
    self.archive = channel.auto_archive_episodes

    # Time attributes
    self.total_time = 0
    self.current_position = 0
    self.current_position_updated = 0

    # Timestamp of last playback time
    self.last_playback = 0
|
|
|
|
|
2011-07-16 17:26:04 +02:00
|
|
|
@property
def channel(self):
    """The PodcastChannel this episode belongs to (alias for parent)."""
    return self.parent
|
|
|
|
|
|
|
|
@property
def db(self):
    """Database handle, reached through the channel's parent (the model)."""
    return self.parent.parent.db
|
2011-07-16 17:26:04 +02:00
|
|
|
|
2011-12-05 16:32:03 +01:00
|
|
|
@property
def trimmed_title(self):
    """Return the title with the common prefix trimmed"""
    # Minimum amount of leftover characters after trimming. This
    # avoids things like "Common prefix 123" to become just "123".
    # If there are LEFTOVER_MIN or less characters after trimming,
    # the original title will be returned without trimming.
    LEFTOVER_MIN = 5

    # "Podcast Name - Title" and "Podcast Name: Title" -> "Title"
    for postfix in (' - ', ': '):
        prefix = self.parent.title + postfix
        if (self.title.startswith(prefix) and
                len(self.title) - len(prefix) > LEFTOVER_MIN):
            return self.title[len(prefix):]

    regex_patterns = [
        # "Podcast Name <number>: ..." -> "<number>: ..."
        r'^%s (\d+: .*)' % re.escape(self.parent.title),

        # "Episode <number>: ..." -> "<number>: ..."
        r'Episode (\d+:.*)',
    ]

    for pattern in regex_patterns:
        if re.match(pattern, self.title):
            title = re.sub(pattern, r'\1', self.title)
            if len(title) > LEFTOVER_MIN:
                return title

    # "#001: Title" -> "001: Title"
    # (raw string fixes the invalid "\d" escape in the original pattern)
    if (not self.parent._common_prefix
            and re.match(r'^#\d+: ', self.title)
            and len(self.title) - 1 > LEFTOVER_MIN):
        return self.title[1:]

    if (self.parent._common_prefix is not None and
            self.title.startswith(self.parent._common_prefix) and
            len(self.title) - len(self.parent._common_prefix) > LEFTOVER_MIN):
        return self.title[len(self.parent._common_prefix):]

    return self.title
|
|
|
|
|
2011-07-16 18:04:07 +02:00
|
|
|
def _set_download_task(self, download_task):
|
|
|
|
self.children = (download_task, self.children[1])
|
|
|
|
|
|
|
|
def _get_download_task(self):
|
|
|
|
return self.children[0]
|
|
|
|
|
|
|
|
download_task = property(_get_download_task, _set_download_task)
|
|
|
|
|
|
|
|
@property
def downloading(self):
    """True while a download task is attached and active (queued, running or paused)."""
    task = self.download_task
    if task is None:
        return False
    active_states = (task.DOWNLOADING, task.QUEUED, task.PAUSED)
    return task.status in active_states
|
|
|
|
|
2011-07-16 20:51:26 +02:00
|
|
|
def check_is_new(self):
    """Return True if the episode counts as "new" (normal state, unplayed, not downloading)."""
    if self.state != gpodder.STATE_NORMAL:
        return False
    return self.is_new and not self.downloading
|
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def save(self):
    """Persist this episode to the database, notifying extensions first."""
    gpodder.user_extensions.on_episode_save(self)
    self.db.save_episode(self)
|
2009-06-12 02:44:04 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def on_downloaded(self, filename):
    """Mark the episode as downloaded and record the real on-disk size."""
    self.state = gpodder.STATE_DOWNLOADED
    self.is_new = True
    # The feed's advertised size may differ from what was actually saved
    self.file_size = os.path.getsize(filename)
    self.save()
|
2006-08-02 20:24:48 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def set_state(self, state):
    """Set the episode state (one of gpodder.STATE_*) and persist it."""
    self.state = state
    self.save()
|
2006-08-02 20:24:48 +02:00
|
|
|
|
2011-02-11 16:25:56 +01:00
|
|
|
def playback_mark(self):
    """Record that playback started now; notifies extensions and saves."""
    self.is_new = False
    self.last_playback = int(time.time())
    gpodder.user_extensions.on_episode_playback(self)
    self.save()
|
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def mark(self, state=None, is_played=None, is_locked=None):
    """Update state/played/locked flags in one call and persist.

    Arguments left as None keep their current value.
    """
    if state is not None:
        self.state = state
    if is_played is not None:
        self.is_new = not is_played

        # "Mark as new" must "undelete" the episode
        if self.is_new and self.state == gpodder.STATE_DELETED:
            self.state = gpodder.STATE_NORMAL
    if is_locked is not None:
        self.archive = is_locked
    self.save()
|
2007-03-08 13:11:10 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def age_in_days(self):
    """Age in days of the downloaded file (delegates to util.file_age_in_days)."""
    return util.file_age_in_days(self.local_filename(create=False, \
            check_only=True))

age_int_prop = property(fget=age_in_days)
|
2006-04-07 03:43:06 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def get_age_string(self):
    """Human-readable age of the downloaded file."""
    return util.file_age_to_string(self.age_in_days())

age_prop = property(fget=get_age_string)
|
2007-07-05 23:07:16 +02:00
|
|
|
|
2011-07-29 13:53:49 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def one_line_description(self):
    """Return a single-line, plain-text teaser of the description.

    HTML is stripped, whitespace collapsed, and the result truncated
    to at most MAX_LINE_LENGTH characters (plus an ellipsis).
    """
    MAX_LINE_LENGTH = 120
    desc = util.remove_html_tags(self.description or '')
    # Raw string fixes the invalid "\s" escape of the original pattern
    desc = re.sub(r'\s+', ' ', desc).strip()
    if not desc:
        return _('No description available')
    else:
        # Decode the description to avoid gPodder bug 1277
        desc = util.convert_bytes(desc).strip()

        if len(desc) > MAX_LINE_LENGTH:
            return desc[:MAX_LINE_LENGTH] + '...'
        else:
            return desc
|
2009-10-13 18:43:54 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def delete_from_disk(self):
    """Delete the downloaded file (if any) and mark the episode deleted."""
    filename = self.local_filename(create=False, check_only=True)
    if filename is not None:
        # Let extensions react before the file disappears
        gpodder.user_extensions.on_episode_delete(self, filename)
        util.delete_file(filename)

    self.set_state(gpodder.STATE_DELETED)
|
2009-07-06 16:14:36 +02:00
|
|
|
|
2014-09-30 13:35:23 +02:00
|
|
|
def get_playback_url(self, fmt_ids=None, vimeo_fmt=None, allow_partial=False):
    """Local (or remote) playback/streaming filename/URL

    Returns either the local filename or a streaming URL that
    can be used to playback this episode.

    Also returns the filename of a partially downloaded file
    in case partial (preview) playback is desired.
    """
    url = self.local_filename(create=False)

    if (allow_partial and url is not None and
            os.path.exists(url + '.partial')):
        return url + '.partial'

    if url is None or not os.path.exists(url):
        # No local copy: stream from the original URL, resolving
        # service-specific pages to direct media URLs first
        url = self.url
        url = youtube.get_real_download_url(url, fmt_ids)
        url = vimeo.get_real_download_url(url, vimeo_fmt)
        url = escapist_videos.get_real_download_url(url)

    return url
|
|
|
|
|
2011-02-25 23:27:59 +01:00
|
|
|
def find_unique_file_name(self, filename, extension):
    """Return a filename not used by another episode of this podcast.

    The episode's own current download filename is allowed as a result.
    """
    # Remove leading and trailing whitespace + dots (to avoid hidden files)
    filename = filename.strip('.' + string.whitespace) + extension

    for name in util.generate_names(filename):
        if (not self.db.episode_filename_exists(self.podcast_id, name) or
                self.download_filename == name):
            return name
|
2010-06-12 17:51:54 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def local_filename(self, create, force_update=False, check_only=False,
        template=None, return_wanted_filename=False):
    """Get (and possibly generate) the local saving filename

    Pass create=True if you want this function to generate a
    new filename if none exists. You only want to do this when
    planning to create/download the file after calling this function.

    Normally, you should pass create=False. This will only
    create a filename when the file already exists from a previous
    version of gPodder (where we used md5 filenames). If the file
    does not exist (and the filename also does not exist), this
    function will return None.

    If you pass force_update=True to this function, it will try to
    find a new (better) filename and move the current file if this
    is the case. This is useful if (during the download) you get
    more information about the file, e.g. the mimetype and you want
    to include this information in the file name generation process.

    If check_only=True is passed to this function, it will never try
    to rename the file, even if would be a good idea. Use this if you
    only want to check if a file exists.

    If "template" is specified, it should be a filename that is to
    be used as a template for generating the "real" filename.

    The generated filename is stored in the database for future access.

    If return_wanted_filename is True, the filename will not be written to
    the database, but simply returned by this function (for use by the
    "import external downloads" feature).
    """
    if self.download_filename is None and (check_only or not create):
        return None

    ext = self.extension(may_call_local_filename=False)

    if not check_only and (force_update or not self.download_filename):
        # Avoid and catch gPodder bug 1440 and similar situations
        if template == '':
            logger.warn('Empty template. Report this podcast URL %s',
                    self.channel.url)
            template = None

        # Try to find a new filename for the current file
        if template is not None:
            # If template is specified, trust the template's extension
            episode_filename, ext = os.path.splitext(template)
        else:
            episode_filename, _ = util.filename_from_url(self.url)
        fn_template = util.sanitize_filename(episode_filename, self.MAX_FILENAME_LENGTH)

        if 'redirect' in fn_template and template is None:
            # This looks like a redirection URL - force URL resolving!
            logger.warn('Looks like a redirection to me: %s', self.url)
            url = util.get_real_url(self.channel.authenticate_url(self.url))
            logger.info('Redirection resolved to: %s', url)
            episode_filename, _ = util.filename_from_url(url)
            fn_template = util.sanitize_filename(episode_filename, self.MAX_FILENAME_LENGTH)

        # Use title for YouTube, Vimeo and Soundcloud downloads
        if (youtube.is_video_link(self.url) or
                vimeo.is_video_link(self.url) or
                escapist_videos.is_video_link(self.url) or
                fn_template == 'stream'):
            sanitized = util.sanitize_filename(self.title, self.MAX_FILENAME_LENGTH)
            if sanitized:
                fn_template = sanitized

        # If the basename is empty, use the md5 hexdigest of the URL
        if not fn_template or fn_template.startswith('redirect.'):
            logger.error('Report this feed: Podcast %s, episode %s',
                    self.channel.url, self.url)
            # Fix: hashlib requires bytes, not str, on Python 3 --
            # encode the URL before hashing
            fn_template = hashlib.md5(self.url.encode('utf-8')).hexdigest()

        # Find a unique filename for this episode
        wanted_filename = self.find_unique_file_name(fn_template, ext)

        if return_wanted_filename:
            # return the calculated filename without updating the database
            return wanted_filename

        # The old file exists, but we have decided to want a different filename
        if self.download_filename and wanted_filename != self.download_filename:
            # there might be an old download folder crawling around - move it!
            new_file_name = os.path.join(self.channel.save_dir, wanted_filename)
            old_file_name = os.path.join(self.channel.save_dir, self.download_filename)
            if os.path.exists(old_file_name) and not os.path.exists(new_file_name):
                logger.info('Renaming %s => %s', old_file_name, new_file_name)
                os.rename(old_file_name, new_file_name)
            elif force_update and not os.path.exists(old_file_name):
                # When we call force_update, the file might not yet exist when we
                # call it from the downloading code before saving the file
                logger.info('Choosing new filename: %s', new_file_name)
            else:
                logger.warn('%s exists or %s does not', new_file_name, old_file_name)
            logger.info('Updating filename of %s to "%s".', self.url, wanted_filename)
        elif self.download_filename is None:
            logger.info('Setting download filename: %s', wanted_filename)
        self.download_filename = wanted_filename
        self.save()

    return os.path.join(self.channel.save_dir, self.download_filename)
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def extension(self, may_call_local_filename=True):
    """Best-guess file extension for this episode (e.g. '.mp3').

    Prefers the downloaded file's extension, then the URL's, and
    finally falls back to the MIME type.
    """
    filename, ext = util.filename_from_url(self.url)
    if may_call_local_filename:
        local = self.local_filename(create=False)
        if local is not None:
            filename, ext = os.path.splitext(local)
    # if we can't detect the extension from the url fallback on the mimetype
    if ext == '' or util.file_type_by_extension(ext) is None:
        ext = util.extension_from_mimetype(self.mime_type)
    return ext
|
2006-03-24 20:08:59 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def mark_new(self):
    """Flag the episode as unplayed ("new") and persist the change."""
    self.is_new = True
    self.save()
|
2010-04-24 18:51:19 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def mark_old(self):
    """Flag the episode as played ("old") and persist the change."""
    self.is_new = False
    self.save()
|
2010-04-24 18:51:19 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def file_exists(self):
    """Return True if the downloaded media file is present on disk."""
    filename = self.local_filename(create=False, check_only=True)
    return filename is not None and os.path.exists(filename)
|
2010-04-24 18:51:19 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def was_downloaded(self, and_exists=False):
    """Return True if this episode is in the downloaded state.

    With and_exists=True, additionally require the file on disk.
    """
    if self.state != gpodder.STATE_DOWNLOADED:
        return False
    if and_exists:
        return self.file_exists()
    return True
|
2010-04-24 18:51:19 +02:00
|
|
|
|
2012-07-02 05:53:33 +02:00
|
|
|
def sync_filename(self, use_custom=False, custom_format=None):
    """Display name used when syncing to devices.

    With use_custom=True, "custom_format" is expanded with this
    episode and its podcast; otherwise the plain title is returned.
    """
    if not use_custom:
        return self.title
    return util.object_string_formatter(custom_format,
            episode=self, podcast=self.channel)
|
2010-04-24 18:51:19 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def file_type(self):
    """Return the media type of this episode ('audio', 'video', ...)."""
    # Assume all YouTube/Vimeo/Escapist links are video files
    for service in (youtube, vimeo, escapist_videos):
        if service.is_video_link(self.url):
            return 'video'

    return util.file_type_by_extension(self.extension())
|
2010-02-05 21:03:34 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
@property
def basename(self):
    """Basename of the media URL, without its file extension."""
    url_basename = os.path.basename(self.url)
    root, _unused_ext = os.path.splitext(url_basename)
    return root
|
2010-02-05 21:03:34 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
@property
def pubtime(self):
    """
    Returns published time as HHMM (or 0000 if not available)
    """
    try:
        return datetime.datetime.fromtimestamp(self.published).strftime('%H%M')
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are no longer swallowed here
        logger.warn('Cannot format pubtime: %s', self.title, exc_info=True)
        return '0000'
|
2008-06-30 03:10:18 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def playlist_title(self):
    """Return a title for this episode in a playlist

    The title will be composed of the podcast name, the
    episode name and the publication date. The return
    value is the canonical representation of this episode
    in playlists (for example, M3U playlists).
    """
    return '%s - %s (%s)' % (self.channel.title,
                             self.title,
                             self.cute_pubdate())
|
2010-02-28 04:10:39 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def cute_pubdate(self):
    """Human-readable publication date, or '(unknown)' when unavailable."""
    formatted = util.format_date(self.published)
    if formatted is None:
        return '(%s)' % _('unknown')
    return formatted

pubdate_prop = property(fget=cute_pubdate)
|
2010-02-28 04:10:39 +01:00
|
|
|
|
2013-02-12 12:41:57 +01:00
|
|
|
def published_datetime(self):
    """Publication timestamp converted to a local datetime object."""
    return datetime.datetime.fromtimestamp(self.published)
|
|
|
|
|
2012-08-30 03:41:46 +02:00
|
|
|
@property
def sortdate(self):
    # ISO-style date (YYYY-MM-DD): lexical sort == chronological sort
    return self.published_datetime().strftime('%Y-%m-%d')
|
2013-02-12 12:41:57 +01:00
|
|
|
|
|
|
|
@property
def pubdate_day(self):
    # Two-digit day of month of the publication date
    return self.published_datetime().strftime('%d')

@property
def pubdate_month(self):
    # Two-digit month of the publication date
    return self.published_datetime().strftime('%m')

@property
def pubdate_year(self):
    # Two-digit year of the publication date
    return self.published_datetime().strftime('%y')
|
2012-08-30 03:41:46 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def is_finished(self):
|
|
|
|
"""Return True if this episode is considered "finished playing"
|
2010-02-28 04:10:39 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
An episode is considered "finished" when there is a
|
|
|
|
current position mark on the track, and when the
|
|
|
|
current position is greater than 99 percent of the
|
|
|
|
total time or inside the last 10 seconds of a track.
|
|
|
|
"""
|
2012-01-19 11:55:25 +01:00
|
|
|
return self.current_position > 0 and self.total_time > 0 and \
|
2010-12-20 15:17:48 +01:00
|
|
|
(self.current_position + 10 >= self.total_time or \
|
|
|
|
self.current_position >= self.total_time*.99)
|
2007-08-20 15:45:46 +02:00
|
|
|
|
2011-04-11 13:09:50 +02:00
|
|
|
def get_play_info_string(self, duration_only=False):
|
2010-12-20 15:17:48 +01:00
|
|
|
duration = util.format_time(self.total_time)
|
2011-04-11 13:09:50 +02:00
|
|
|
if duration_only and self.total_time > 0:
|
|
|
|
return duration
|
2014-01-30 01:05:58 +01:00
|
|
|
elif self.is_finished():
|
|
|
|
return '%s (%s)' % (_('Finished'), duration)
|
2011-04-11 13:09:50 +02:00
|
|
|
elif self.current_position > 0 and \
|
2011-02-11 15:57:03 +01:00
|
|
|
self.current_position != self.total_time:
|
2010-12-20 15:17:48 +01:00
|
|
|
position = util.format_time(self.current_position)
|
|
|
|
return '%s / %s' % (position, duration)
|
2011-02-06 13:46:20 +01:00
|
|
|
elif self.total_time > 0:
|
2010-12-20 15:17:48 +01:00
|
|
|
return duration
|
2011-02-06 13:46:20 +01:00
|
|
|
else:
|
|
|
|
return '-'
|
2010-05-18 11:28:55 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
def update_from(self, episode):
|
2017-03-25 16:34:05 +01:00
|
|
|
for k in ('title', 'url', 'description', 'description_html', 'link', 'published', 'guid', 'file_size', 'payment_url'):
|
2010-12-20 15:17:48 +01:00
|
|
|
setattr(self, k, getattr(episode, k))
|
2010-08-20 23:36:57 +02:00
|
|
|
|
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
class PodcastChannel(PodcastModelObject):
    """A podcast subscription (feed) and its list of episodes."""

    # Database columns plus a cached common episode-title prefix
    __slots__ = schema.PodcastColumns + ('_common_prefix',)

    # Folds common umlauts so sort_key() ignores them when sorting
    UNICODE_TRANSLATE = {ord('ö'): 'o', ord('ä'): 'a', ord('ü'): 'u'}

    # Enumerations for download strategy
    STRATEGY_DEFAULT, STRATEGY_LATEST = list(range(2))

    # Description and ordering of strategies
    STRATEGIES = [
        (STRATEGY_DEFAULT, _('Default')),
        (STRATEGY_LATEST, _('Only keep latest')),
    ]

    # Maximum length of the sanitized download folder name
    MAX_FOLDERNAME_LENGTH = 60
    # Episodes older than a week before the newest one are not marked new
    SECONDS_PER_WEEK = 7*24*60*60
    # Class used to instantiate this channel's episodes
    EpisodeClass = PodcastEpisode

    # Shared feed fetcher instance used by all channels
    feed_fetcher = gPodderFetcher()
|
2009-09-09 19:53:26 +02:00
|
|
|
|
2012-12-28 18:31:39 +01:00
|
|
|
    def __init__(self, model, id=None):
        """Create a podcast; when *id* is given, load its episodes from the DB."""
        self.parent = model
        self.children = []

        # Database identity and feed metadata
        self.id = id
        self.url = None
        self.title = ''
        self.link = ''
        self.description = ''
        self.cover_url = None
        self.payment_url = None

        # HTTP authentication credentials for the feed
        self.auth_username = ''
        self.auth_password = ''

        # Cached HTTP validators for conditional requests
        self.http_last_modified = None
        self.http_etag = None

        # Per-podcast user settings
        self.auto_archive_episodes = False
        self.download_folder = None
        self.pause_subscription = False
        self.sync_to_mp3_player = True
        self.cover_thumb = None

        self.section = _('Other')
        self._common_prefix = None
        self.download_strategy = PodcastChannel.STRATEGY_DEFAULT

        # Existing podcast: populate episodes and the title-prefix cache
        if self.id:
            self.children = self.db.load_episodes(self, self.episode_factory)
            self._determine_common_prefix()
|
|
|
|
|
2011-10-17 13:54:49 +02:00
|
|
|
    @property
    def model(self):
        """The Model instance this podcast belongs to."""
        return self.parent
|
|
|
|
|
2011-10-12 19:59:09 +02:00
|
|
|
    @property
    def db(self):
        """The database object shared via the parent Model."""
        return self.parent.db
|
2011-07-16 17:26:04 +02:00
|
|
|
|
2012-10-13 16:21:25 +02:00
|
|
|
def get_download_strategies(self):
|
|
|
|
for value, caption in PodcastChannel.STRATEGIES:
|
|
|
|
yield self.download_strategy == value, value, caption
|
|
|
|
|
|
|
|
def set_download_strategy(self, download_strategy):
|
|
|
|
if download_strategy == self.download_strategy:
|
|
|
|
return
|
|
|
|
|
|
|
|
caption = dict(self.STRATEGIES).get(download_strategy)
|
|
|
|
if caption is not None:
|
|
|
|
logger.debug('Strategy for %s changed to %s', self.title, caption)
|
|
|
|
self.download_strategy = download_strategy
|
|
|
|
else:
|
|
|
|
logger.warn('Cannot set strategy to %d', download_strategy)
|
|
|
|
|
2014-04-01 22:35:39 +02:00
|
|
|
def rewrite_url(self, new_url):
|
|
|
|
new_url = util.normalize_feed_url(new_url)
|
|
|
|
if new_url is None:
|
|
|
|
return None
|
|
|
|
|
|
|
|
self.url = new_url
|
|
|
|
self.http_etag = None
|
|
|
|
self.http_last_modified = None
|
|
|
|
self.save()
|
|
|
|
return new_url
|
|
|
|
|
2011-10-22 17:50:10 +02:00
|
|
|
def check_download_folder(self):
|
2011-02-25 00:45:49 +01:00
|
|
|
"""Check the download folder for externally-downloaded files
|
|
|
|
|
|
|
|
This will try to assign downloaded files with episodes in the
|
2012-09-05 11:05:57 +02:00
|
|
|
database.
|
2011-10-22 17:50:10 +02:00
|
|
|
|
|
|
|
This will also cause missing files to be marked as deleted.
|
2011-02-25 00:45:49 +01:00
|
|
|
"""
|
2011-10-22 17:50:10 +02:00
|
|
|
known_files = set()
|
|
|
|
|
2012-12-28 18:31:39 +01:00
|
|
|
for episode in self.get_episodes(gpodder.STATE_DOWNLOADED):
|
2011-10-22 17:50:10 +02:00
|
|
|
if episode.was_downloaded():
|
|
|
|
filename = episode.local_filename(create=False)
|
2014-01-30 08:29:38 +01:00
|
|
|
if filename is None:
|
|
|
|
# No filename has been determined for this episode
|
|
|
|
continue
|
|
|
|
|
|
|
|
if not os.path.exists(filename):
|
2011-10-22 17:50:10 +02:00
|
|
|
# File has been deleted by the user - simulate a
|
|
|
|
# delete event (also marks the episode as deleted)
|
|
|
|
logger.debug('Episode deleted: %s', filename)
|
|
|
|
episode.delete_from_disk()
|
|
|
|
continue
|
|
|
|
|
|
|
|
known_files.add(filename)
|
|
|
|
|
2011-02-25 00:45:49 +01:00
|
|
|
existing_files = set(filename for filename in \
|
2011-02-25 19:14:43 +01:00
|
|
|
glob.glob(os.path.join(self.save_dir, '*')) \
|
|
|
|
if not filename.endswith('.partial'))
|
2012-03-05 10:44:19 +01:00
|
|
|
|
|
|
|
ignore_files = ['folder'+ext for ext in
|
2012-09-05 11:05:57 +02:00
|
|
|
coverart.CoverDownloader.EXTENSIONS]
|
2012-03-05 10:44:19 +01:00
|
|
|
|
|
|
|
external_files = existing_files.difference(list(known_files) +
|
|
|
|
[os.path.join(self.save_dir, ignore_file)
|
|
|
|
for ignore_file in ignore_files])
|
2011-02-25 00:45:49 +01:00
|
|
|
if not external_files:
|
2011-10-22 17:50:10 +02:00
|
|
|
return
|
2011-02-25 00:45:49 +01:00
|
|
|
|
|
|
|
all_episodes = self.get_all_episodes()
|
|
|
|
|
|
|
|
for filename in external_files:
|
|
|
|
found = False
|
|
|
|
|
|
|
|
basename = os.path.basename(filename)
|
2011-07-16 14:30:08 +02:00
|
|
|
existing = [e for e in all_episodes if e.download_filename == basename]
|
2011-02-25 00:45:49 +01:00
|
|
|
if existing:
|
2011-07-16 14:30:08 +02:00
|
|
|
existing = existing[0]
|
2011-07-15 16:32:06 +02:00
|
|
|
logger.info('Importing external download: %s', filename)
|
2011-02-25 00:45:49 +01:00
|
|
|
existing.on_downloaded(filename)
|
|
|
|
continue
|
|
|
|
|
|
|
|
for episode in all_episodes:
|
|
|
|
wanted_filename = episode.local_filename(create=True, \
|
|
|
|
return_wanted_filename=True)
|
|
|
|
if basename == wanted_filename:
|
2011-07-15 16:32:06 +02:00
|
|
|
logger.info('Importing external download: %s', filename)
|
2011-02-25 00:45:49 +01:00
|
|
|
episode.download_filename = basename
|
|
|
|
episode.on_downloaded(filename)
|
|
|
|
found = True
|
|
|
|
break
|
|
|
|
|
|
|
|
wanted_base, wanted_ext = os.path.splitext(wanted_filename)
|
|
|
|
target_base, target_ext = os.path.splitext(basename)
|
|
|
|
if wanted_base == target_base:
|
|
|
|
# Filenames only differ by the extension
|
|
|
|
wanted_type = util.file_type_by_extension(wanted_ext)
|
|
|
|
target_type = util.file_type_by_extension(target_ext)
|
|
|
|
|
|
|
|
# If wanted type is None, assume that we don't know
|
|
|
|
# the right extension before the download (e.g. YouTube)
|
|
|
|
# if the wanted type is the same as the target type,
|
|
|
|
# assume that it's the correct file
|
|
|
|
if wanted_type is None or wanted_type == target_type:
|
2011-07-15 16:32:06 +02:00
|
|
|
logger.info('Importing external download: %s', filename)
|
2011-02-25 00:45:49 +01:00
|
|
|
episode.download_filename = basename
|
|
|
|
episode.on_downloaded(filename)
|
|
|
|
found = True
|
|
|
|
break
|
|
|
|
|
2012-12-07 20:02:21 +01:00
|
|
|
if not found and not util.is_system_file(filename):
|
2011-07-15 16:32:06 +02:00
|
|
|
logger.warn('Unknown external file: %s', filename)
|
2011-02-25 00:45:49 +01:00
|
|
|
|
2011-07-16 20:51:26 +02:00
|
|
|
@classmethod
|
|
|
|
def sort_key(cls, podcast):
|
2012-01-10 13:47:20 +01:00
|
|
|
key = util.convert_bytes(podcast.title.lower())
|
2011-07-16 20:51:26 +02:00
|
|
|
return re.sub('^the ', '', key).translate(cls.UNICODE_TRANSLATE)
|
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
    @classmethod
    def load(cls, model, url, create=True, authentication_tokens=None, max_episodes=0):
        """Return the podcast for *url*, optionally subscribing to it.

        Returns the existing podcast when already subscribed. Otherwise,
        when create=True, fetches the feed, sets up the download folder
        and saves the new podcast; on fetch failure the partially-created
        podcast is removed and the exception re-raised. Returns None
        when not subscribed and create=False.
        """
        existing = [p for p in model.get_podcasts() if p.url == url]

        if existing:
            return existing[0]

        if create:
            tmp = cls(model)
            tmp.url = url
            if authentication_tokens is not None:
                tmp.auth_username = authentication_tokens[0]
                tmp.auth_password = authentication_tokens[1]

            # Save podcast, so it gets an ID assigned before
            # updating the feed and adding saving episodes
            tmp.save()

            try:
                tmp.update(max_episodes)
            except Exception as e:
                # Roll back: remove downloads and the database entry
                logger.debug('Fetch failed. Removing buggy feed.')
                tmp.remove_downloaded()
                tmp.delete()
                raise

            # Determine the section in which this podcast should appear
            tmp.section = tmp._get_content_type()

            # Determine a new download folder now that we have the title
            tmp.get_save_dir(force_new=True)

            # Mark episodes as downloaded if files already exist (bug 902)
            tmp.check_download_folder()

            # Determine common prefix of episode titles
            tmp._determine_common_prefix()

            tmp.save()

            gpodder.user_extensions.on_podcast_subscribe(tmp)

            return tmp
|
2010-02-28 04:10:39 +01:00
|
|
|
|
2011-07-16 14:30:08 +02:00
|
|
|
    def episode_factory(self, d):
        """
        This function takes a dictionary containing key-value pairs for
        episodes and returns a new PodcastEpisode object that is connected
        to this podcast object.

        Returns: A new PodcastEpisode object
        """
        return self.EpisodeClass.create_from_dict(d, self)
|
2008-03-02 13:56:16 +01:00
|
|
|
|
2011-08-08 13:06:20 +02:00
|
|
|
def _consume_updated_title(self, new_title):
|
|
|
|
# Replace multi-space and newlines with single space (Maemo bug 11173)
|
|
|
|
new_title = re.sub('\s+', ' ', new_title).strip()
|
|
|
|
|
|
|
|
# Only update the podcast-supplied title when we
|
|
|
|
# don't yet have a title, or if the title is the
|
|
|
|
# feed URL (e.g. we didn't find a title before).
|
|
|
|
if not self.title or self.title == self.url:
|
|
|
|
self.title = new_title
|
|
|
|
|
2012-07-10 11:57:59 +02:00
|
|
|
# Start YouTube- and Vimeo-specific title FIX
|
|
|
|
YOUTUBE_PREFIX = 'Uploads by '
|
|
|
|
VIMEO_PREFIX = 'Vimeo / '
|
|
|
|
if self.title.startswith(YOUTUBE_PREFIX):
|
|
|
|
self.title = self.title[len(YOUTUBE_PREFIX):] + ' on YouTube'
|
|
|
|
elif self.title.startswith(VIMEO_PREFIX):
|
|
|
|
self.title = self.title[len(VIMEO_PREFIX):] + ' on Vimeo'
|
|
|
|
# End YouTube- and Vimeo-specific title FIX
|
|
|
|
|
2012-12-28 15:34:20 +01:00
|
|
|
def _consume_metadata(self, title, link, description, cover_url,
|
|
|
|
payment_url):
|
|
|
|
self._consume_updated_title(title)
|
|
|
|
self.link = link
|
|
|
|
self.description = description
|
|
|
|
self.cover_url = cover_url
|
|
|
|
self.payment_url = payment_url
|
2010-12-20 15:17:48 +01:00
|
|
|
self.save()
|
2010-02-28 04:10:39 +01:00
|
|
|
|
2012-12-28 15:34:20 +01:00
|
|
|
    def _consume_custom_feed(self, custom_feed, max_episodes=0):
        """Update this podcast from a non-RSS (custom handler) feed object."""
        self._consume_metadata(custom_feed.get_title(),
                custom_feed.get_link(),
                custom_feed.get_description(),
                custom_feed.get_image(),
                None)

        existing = self.get_all_episodes()
        existing_guids = [episode.guid for episode in existing]

        # Insert newly-found episodes into the database + local cache
        new_episodes, seen_guids = custom_feed.get_new_episodes(self, existing_guids)
        self.children.extend(new_episodes)

        self.remove_unreachable_episodes(existing, seen_guids, max_episodes)
|
2007-08-27 00:04:50 +02:00
|
|
|
|
2012-01-03 23:59:19 +01:00
|
|
|
    def _consume_updated_feed(self, feed, max_episodes=0):
        """Merge a freshly-parsed feed dict into this podcast.

        Updates podcast metadata and HTTP validators, creates or
        updates episodes by GUID, applies new-episode heuristics and
        finally purges episodes that disappeared from the feed.
        """
        self._consume_metadata(feed.get('title', self.url),
                feed.get('link', self.link),
                feed.get('description', ''),
                feed.get('cover_url', None),
                feed.get('payment_url', None))

        # Update values for HTTP conditional requests
        headers = feed.get('headers', {})
        self.http_etag = headers.get('etag', self.http_etag)
        self.http_last_modified = headers.get('last-modified', self.http_last_modified)

        # Load all episodes to update them properly.
        existing = self.get_all_episodes()

        # We have to sort the entries in descending chronological order,
        # because if the feed lists items in ascending order and has >
        # max_episodes old episodes, new episodes will not be shown.
        # See also: gPodder Bug 1186
        entries = sorted(feed.get('episodes', []), key=lambda episode: episode['published'], reverse=True)

        # We can limit the maximum number of entries that gPodder will parse
        if max_episodes > 0 and len(entries) > max_episodes:
            entries = entries[:max_episodes]

        # GUID-based existing episode list
        existing_guids = dict((e.guid, e) for e in existing)

        # Get most recent published of all episodes
        last_published = self.db.get_last_published(self) or 0

        # Keep track of episode GUIDs currently seen in the feed
        seen_guids = set()

        # Number of new episodes found
        new_episodes = 0

        # Search all entries for new episodes
        for entry in entries:
            episode = self.EpisodeClass.from_podcastparser_entry(entry, self)
            if episode is not None:
                seen_guids.add(episode.guid)
            else:
                continue

            # Detect (and update) existing episode based on GUIDs
            existing_episode = existing_guids.get(episode.guid, None)
            if existing_episode:
                existing_episode.update_from(episode)
                existing_episode.save()
                continue

            # Workaround for bug 340: If the episode has been
            # published earlier than one week before the most
            # recent existing episode, do not mark it as new.
            if episode.published < last_published - self.SECONDS_PER_WEEK:
                logger.debug('Episode with old date: %s', episode.title)
                episode.is_new = False

            if episode.is_new:
                new_episodes += 1

            # Only allow a certain number of new episodes per update
            if (self.download_strategy == PodcastChannel.STRATEGY_LATEST and
                    new_episodes > 1):
                episode.is_new = False

            episode.save()
            self.children.append(episode)

        self.remove_unreachable_episodes(existing, seen_guids, max_episodes)
|
|
|
|
|
|
|
|
    def remove_unreachable_episodes(self, existing, seen_guids, max_episodes):
        """Purge episodes no longer reachable via the feed.

        *existing* is the episode list before the update, *seen_guids*
        the GUIDs present in the current feed. Finishes by enforcing
        max_episodes and re-sorting the episode list.
        """
        # Remove "unreachable" episodes - episodes that have not been
        # downloaded and that the feed does not list as downloadable anymore
        # Keep episodes that are currently being downloaded, though (bug 1534)
        if self.id is not None:
            episodes_to_purge = (e for e in existing if
                    e.state != gpodder.STATE_DOWNLOADED and
                    e.guid not in seen_guids and not e.downloading)

            for episode in episodes_to_purge:
                logger.debug('Episode removed from feed: %s (%s)',
                        episode.title, episode.guid)
                gpodder.user_extensions.on_episode_removed_from_podcast(episode)
                self.db.delete_episode_by_guid(episode.guid, self.id)

                # Remove the episode from the "children" episodes list
                if self.children is not None:
                    self.children.remove(episode)

        # This *might* cause episodes to be skipped if there were more than
        # max_episodes_per_feed items added to the feed between updates.
        # The benefit is that it prevents old episodes from appearing as new
        # in certain situations (see bug #340).
        self.db.purge(max_episodes, self.id) # TODO: Remove from self.children!

        # Sort episodes by pubdate, descending
        self.children.sort(key=lambda e: e.published, reverse=True)
|
2009-09-08 21:35:36 +02:00
|
|
|
|
2012-01-03 23:59:19 +01:00
|
|
|
    def update(self, max_episodes=0):
        """Fetch the feed and merge its contents into this podcast.

        Handles custom feeds, normal updates, HTTP redirects (by
        recursing once with the new URL) and not-modified responses.
        On error, notifies extensions and re-raises the exception.
        """
        try:
            result = self.feed_fetcher.fetch_channel(self)

            if result.status == feedcore.CUSTOM_FEED:
                self._consume_custom_feed(result.feed, max_episodes)
            elif result.status == feedcore.UPDATED_FEED:
                self._consume_updated_feed(result.feed, max_episodes)
            elif result.status == feedcore.NEW_LOCATION:
                url = result.feed
                logger.info('New feed location: %s => %s', self.url, url)
                # Refuse to follow a redirect onto an existing subscription
                if url in set(x.url for x in self.model.get_podcasts()):
                    raise Exception('Already subscribed to ' + url)
                self.url = url
                # With the updated URL, fetch the feed again
                self.update(max_episodes)
                return
            elif result.status == feedcore.NOT_MODIFIED:
                pass

            self.save()
        except Exception as e:
            # "Not really" errors
            #feedcore.AuthenticationRequired
            # Temporary errors
            #feedcore.Offline
            #feedcore.BadRequest
            #feedcore.InternalServerError
            #feedcore.WifiLogin
            # Permanent errors
            #feedcore.Unsubscribe
            #feedcore.NotFound
            #feedcore.InvalidFeed
            #feedcore.UnknownStatusCode
            gpodder.user_extensions.on_podcast_update_failed(self, e)
            raise

        gpodder.user_extensions.on_podcast_updated(self)

        # Re-determine the common prefix for all episodes
        self._determine_common_prefix()

        self.db.commit()
|
2007-12-10 09:41:17 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
    def delete(self):
        """Remove this podcast from the database and from the model."""
        self.db.delete_podcast(self)
        self.model._remove_podcast(self)
|
2011-10-12 19:59:09 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
    def save(self):
        """Persist this podcast to the database.

        Lazily assigns a download folder, notifies extensions and
        registers the podcast with the model.
        """
        if self.download_folder is None:
            self.get_save_dir()

        gpodder.user_extensions.on_podcast_save(self)

        self.db.save_podcast(self)
        self.model._append_podcast(self)
|
2010-12-20 15:17:48 +01:00
|
|
|
|
|
|
|
def get_statistics(self):
|
|
|
|
if self.id is None:
|
|
|
|
return (0, 0, 0, 0, 0)
|
2006-11-20 12:51:20 +01:00
|
|
|
else:
|
2010-12-20 15:17:48 +01:00
|
|
|
return self.db.get_podcast_statistics(self.id)
|
2006-12-06 21:25:26 +01:00
|
|
|
|
2011-07-27 15:36:04 +02:00
|
|
|
    @property
    def group_by(self):
        """Section name used to group this podcast in the UI."""
        if not self.section:
            # Lazily derive (and persist) a section from the content type
            self.section = self._get_content_type()
            self.save()

        return self.section
|
|
|
|
|
2011-02-04 19:04:36 +01:00
|
|
|
def _get_content_type(self):
|
2014-09-19 17:57:30 +02:00
|
|
|
if 'youtube.com' in self.url or 'vimeo.com' in self.url or 'escapistmagazine.com' in self.url:
|
2011-07-27 15:36:04 +02:00
|
|
|
return _('Video')
|
|
|
|
|
|
|
|
audio, video, other = 0, 0, 0
|
|
|
|
for content_type in self.db.get_content_types(self.id):
|
|
|
|
content_type = content_type.lower()
|
|
|
|
if content_type.startswith('audio'):
|
|
|
|
audio += 1
|
|
|
|
elif content_type.startswith('video'):
|
|
|
|
video += 1
|
|
|
|
else:
|
|
|
|
other += 1
|
|
|
|
|
|
|
|
if audio >= video:
|
|
|
|
return _('Audio')
|
|
|
|
elif video > other:
|
|
|
|
return _('Video')
|
2011-02-04 19:04:36 +01:00
|
|
|
|
2011-07-27 15:36:04 +02:00
|
|
|
return _('Other')
|
2011-02-04 19:04:36 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
    def authenticate_url(self, url):
        """Return *url* with this podcast's HTTP credentials embedded."""
        return util.url_add_authentication(url, self.auth_username, self.auth_password)
|
2007-11-08 20:11:57 +01:00
|
|
|
|
2011-08-07 23:39:46 +02:00
|
|
|
    def rename(self, new_title):
        """Rename this podcast and migrate its download folder.

        Renames (or merges) the on-disk download folder to match the
        new title, then updates the title and saves the podcast.
        No-op when the stripped title is unchanged.
        """
        new_title = new_title.strip()
        if self.title == new_title:
            return

        new_folder_name = self.find_unique_folder_name(new_title)
        if new_folder_name and new_folder_name != self.download_folder:
            new_folder = os.path.join(gpodder.downloads, new_folder_name)
            old_folder = os.path.join(gpodder.downloads, self.download_folder)
            if os.path.exists(old_folder):
                if not os.path.exists(new_folder):
                    # Old folder exists, new folder does not -> simply rename
                    logger.info('Renaming %s => %s', old_folder, new_folder)
                    os.rename(old_folder, new_folder)
                else:
                    # Both folders exist -> move files and delete old folder
                    logger.info('Moving files from %s to %s', old_folder,
                            new_folder)
                    for file in glob.glob(os.path.join(old_folder, '*')):
                        shutil.move(file, new_folder)
                    logger.info('Removing %s', old_folder)
                    shutil.rmtree(old_folder, ignore_errors=True)
            self.download_folder = new_folder_name

        self.title = new_title
        self.save()
|
2009-05-08 14:55:48 +02:00
|
|
|
|
2012-01-05 13:28:09 +01:00
|
|
|
def _determine_common_prefix(self):
|
2012-01-09 19:04:35 +01:00
|
|
|
# We need at least 2 episodes for the prefix to be "common" ;)
|
|
|
|
if len(self.children) < 2:
|
|
|
|
self._common_prefix = ''
|
|
|
|
return
|
|
|
|
|
2012-01-05 13:28:09 +01:00
|
|
|
prefix = os.path.commonprefix([x.title for x in self.children])
|
|
|
|
# The common prefix must end with a space - otherwise it's not
|
|
|
|
# on a word boundary, and we might end up chopping off too much
|
|
|
|
if prefix and prefix[-1] != ' ':
|
|
|
|
prefix = prefix[:prefix.rfind(' ')+1]
|
|
|
|
|
|
|
|
self._common_prefix = prefix
|
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
    def get_all_episodes(self):
        """Return the cached list of all episodes of this podcast."""
        return self.children
|
2009-09-21 23:34:12 +02:00
|
|
|
|
2012-12-28 18:31:39 +01:00
|
|
|
def get_episodes(self, state):
|
2016-11-21 23:13:46 +01:00
|
|
|
return [e for e in self.get_all_episodes() if e.state == state]
|
2012-12-28 18:31:39 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
    def find_unique_folder_name(self, download_folder):
        """Return a download folder name not used by another podcast.

        The podcast's own current folder name is accepted as "unique".
        Returns None only if util.generate_names() ever yields no
        acceptable candidate -- presumably it is inexhaustible; verify.
        """
        # Remove trailing dots to avoid errors on Windows (bug 600)
        # Also remove leading dots to avoid hidden folders on Linux
        download_folder = download_folder.strip('.' + string.whitespace)

        for folder_name in util.generate_names(download_folder):
            if (not self.db.podcast_download_folder_exists(folder_name) or
                    self.download_folder == folder_name):
                return folder_name
|
2008-06-30 03:10:18 +02:00
|
|
|
|
2011-08-07 23:39:46 +02:00
|
|
|
    def get_save_dir(self, force_new=False):
        """Return (and ensure the existence of) the download directory.

        Assigns a new folder name derived from the title (or URL) when
        none is set or force_new=True, saving the podcast afterwards.
        """
        if self.download_folder is None or force_new:
            # we must change the folder name, because it has not been set manually
            fn_template = util.sanitize_filename(self.title, self.MAX_FOLDERNAME_LENGTH)

            # Fall back to the URL when the title sanitizes to nothing
            if not fn_template:
                fn_template = util.sanitize_filename(self.url, self.MAX_FOLDERNAME_LENGTH)

            # Find a unique folder name for this podcast
            download_folder = self.find_unique_folder_name(fn_template)

            # Try removing the download folder if it has been created previously
            if self.download_folder is not None:
                folder = os.path.join(gpodder.downloads, self.download_folder)
                try:
                    os.rmdir(folder)
                except OSError:
                    # Non-empty (or otherwise unremovable) folders are kept
                    logger.info('Old download folder is kept for %s', self.url)

            logger.info('Updating download_folder of %s to %s', self.url,
                    download_folder)
            self.download_folder = download_folder
            self.save()

        save_dir = os.path.join(gpodder.downloads, self.download_folder)

        # Create save_dir if it does not yet exist
        if not util.make_directory(save_dir):
            logger.error('Could not create save_dir: %s', save_dir)

        return save_dir
|
2012-12-28 18:33:13 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
save_dir = property(fget=get_save_dir)
|
2007-11-08 20:11:57 +01:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
    def remove_downloaded(self):
        """Delete this podcast's download directory from disk.

        Extensions are notified for each downloaded episode file
        before the whole directory tree is removed.
        """
        # Remove the download directory
        for episode in self.get_episodes(gpodder.STATE_DOWNLOADED):
            filename = episode.local_filename(create=False, check_only=True)
            if filename is not None:
                gpodder.user_extensions.on_episode_delete(episode, filename)

        shutil.rmtree(self.save_dir, True)
|
2010-09-30 12:37:06 +02:00
|
|
|
|
2010-12-20 15:17:48 +01:00
|
|
|
    @property
    def cover_file(self):
        """Base path (without extension) of the podcast cover image file."""
        return os.path.join(self.save_dir, 'folder')
|
2010-02-28 04:10:39 +01:00
|
|
|
|
2010-12-20 15:48:29 +01:00
|
|
|
|
|
|
|
class Model(object):
    """Root object managing the list of podcast subscriptions."""

    # Class used to instantiate subscriptions (override to customize)
    PodcastClass = PodcastChannel
|
|
|
|
|
2011-10-12 19:59:09 +02:00
|
|
|
    def __init__(self, db):
        """Create a model backed by the given database object."""
        self.db = db
        # Lazily-loaded podcast list (None until first get_podcasts() call)
        self.children = None
|
2010-12-20 15:48:29 +01:00
|
|
|
|
2011-10-17 13:54:49 +02:00
|
|
|
def _append_podcast(self, podcast):
|
|
|
|
if podcast not in self.children:
|
|
|
|
self.children.append(podcast)
|
|
|
|
|
|
|
|
    def _remove_podcast(self, podcast):
        """Remove *podcast* from the model and notify extensions."""
        self.children.remove(podcast)
        # NOTE(review): this passes the Model instance, not the removed
        # podcast -- confirm the extension hook really expects the model
        gpodder.user_extensions.on_podcast_delete(self)
|
2011-10-17 13:54:49 +02:00
|
|
|
|
2011-10-12 19:59:09 +02:00
|
|
|
    def get_podcasts(self):
        """Return all subscribed podcasts, loading them on first call."""
        def podcast_factory(dct, db):
            # Rebuild a podcast object from its database row dict
            return self.PodcastClass.create_from_dict(dct, self, dct['id'])

        if self.children is None:
            self.children = self.db.load_podcasts(podcast_factory)

            # Check download folders for changes (bug 902)
            for podcast in self.children:
                podcast.check_download_folder()

        return self.children
|
|
|
|
|
|
|
|
    def load_podcast(self, url, create=True, authentication_tokens=None,
            max_episodes=0):
        """Subscribe to (or look up) the podcast at *url*.

        Callers must not pass a URL that is already subscribed; the
        assert below is a debugging aid only (stripped under -O).
        """
        assert all(url != podcast.url for podcast in self.get_podcasts())
        return self.PodcastClass.load(self, url, create,
                authentication_tokens,
                max_episodes)
|
2010-12-20 15:48:29 +01:00
|
|
|
|
2011-07-16 20:51:26 +02:00
|
|
|
    @classmethod
    def podcast_sort_key(cls, podcast):
        """Delegate sorting to the podcast class's own sort key."""
        return cls.PodcastClass.sort_key(podcast)
|
|
|
|
|
2011-07-26 15:31:21 +02:00
|
|
|
    @classmethod
    def episode_sort_key(cls, episode):
        """Sort key for episodes: the publication timestamp."""
        return episode.published
|
|
|
|
|
|
|
|
    @classmethod
    def sort_episodes_by_pubdate(cls, episodes, reverse=False):
        """Sort a list of PodcastEpisode objects chronologically

        Returns an iterable, sorted sequence of the episodes
        """
        return sorted(episodes, key=cls.episode_sort_key, reverse=reverse)
|
2010-12-20 15:48:29 +01:00
|
|
|
|