Treat only really new episodes as new after an update

Make PodcastChannel.update() return a list of the episodes it added.
Add a new config variable, ui.gtk.only_added_are_new. If this variable
is True, only these newly added episodes are downloaded, queued, or
shown after an update (depending on the value of the auto_download
config variable), instead of all episodes marked as new.
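A minimal sketch of the resulting selection logic, in plain Python. The function and parameter names here are illustrative, not part of the commit; only the config key and check_is_new() come from the diff below:

    # Hypothetical helper illustrating the post-update episode selection.
    def select_episodes(config, new_episodes, updated_channels, get_new_episodes):
        if config.ui.gtk.only_added_are_new:
            # Only episodes added by this update, re-checked because an
            # extension may have marked them old in the meantime.
            return [e for e in new_episodes if e.check_is_new()]
        # Previous behaviour: every episode still flagged new in any
        # updated channel, whether or not this update added it.
        return get_new_episodes(updated_channels)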
Teemu Ikonen 2022-06-03 14:06:04 +03:00
parent 8b5bf2c361
commit 2552e6e0ec
3 changed files with 18 additions and 11 deletions

src/gpodder/config.py

@@ -154,6 +154,7 @@ defaults = {
             'toolbar': False,
             'new_episodes': 'show',  # ignore, show, queue, download
+            'only_added_are_new': False,  # Only just added episodes are considered new after an update
             'live_search_delay': 200,
             'search_always_visible': False,
             'find_as_you_type': True,
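gPodder resolves entries of this defaults dictionary through dotted attribute access on the config object, which is how the code in main.py reads the new flag. A toy stand-in to show the mapping (SimpleNamespace here is illustrative only, not gPodder's actual config class):

    from types import SimpleNamespace

    # Nested sections become nested attributes, mirroring
    # defaults['ui']['gtk']['only_added_are_new'].
    config = SimpleNamespace(
        ui=SimpleNamespace(gtk=SimpleNamespace(only_added_are_new=False)))
    assert config.ui.gtk.only_added_are_new is False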

src/gpodder/gtkui/main.py

@@ -2842,6 +2842,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
         def update_feed_cache_proc():
             updated_channels = []
             nr_update_errors = 0
+            new_episodes = []
             for updated, channel in enumerate(channels):
                 if self.feed_cache_update_cancelled:
                     break
@@ -2855,7 +2856,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
                 try:
                     channel._update_error = None
                     util.idle_add(indicate_updating_podcast, channel)
-                    channel.update(max_episodes=self.config.limit.episodes)
+                    new_episodes.extend(channel.update(max_episodes=self.config.limit.episodes))
                     self._update_cover(channel)
                 except Exception as e:
                     message = str(e)
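Since update() now returns a list, the update loop can accumulate the additions of every channel into a single flat list. A condensed sketch of the pattern above, with names mirroring the diff and error handling elided:

    new_episodes = []
    for channel in channels:  # channels: the podcasts selected for update
        # Each per-channel update contributes only the episodes it added;
        # extend() keeps the accumulated list flat.
        new_episodes.extend(channel.update(max_episodes=max_episodes))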
@@ -2899,7 +2900,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
                         nr_update_errors) % {'count': nr_update_errors},
                     _('Error while updating feeds'), widget=self.treeChannels)

-        def update_feed_cache_finish_callback():
+        def update_feed_cache_finish_callback(new_episodes):
             # Process received episode actions for all updated URLs
             self.process_received_episode_actions()
@@ -2912,9 +2913,11 @@ class gPodder(BuilderWidget, dbus.service.Object):
                 # The user decided to abort the feed update
                 self.show_update_feeds_buttons()

-            # Only search for new episodes in podcasts that have been
-            # updated, not in other podcasts (for single-feed updates)
-            episodes = self.get_new_episodes([c for c in updated_channels])
+            # The filter extension can mark newly added episodes as old,
+            # so take only episodes marked as new.
+            episodes = ((e for e in new_episodes if e.check_is_new())
+                        if self.config.ui.gtk.only_added_are_new
+                        else self.get_new_episodes([c for c in updated_channels]))

             if self.config.downloads.chronological_order:
                 # download older episodes first
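The conditional expression picks between two sources: a generator over the just-added episodes, re-checked with check_is_new() because an extension may have un-flagged them, and the old whole-channel scan. A self-contained sketch of the filtering, with a stand-in episode class (illustrative only):

    class Episode:  # stand-in for gpodder.model.PodcastEpisode
        def __init__(self, title, is_new):
            self.title, self.is_new = title, is_new

        def check_is_new(self):
            # The real method also consults state and download status.
            return self.is_new

    added = [Episode('a', True), Episode('b', False)]  # 'b' un-flagged by an extension
    episodes = [e for e in added if e.check_is_new()]
    assert [e.title for e in episodes] == ['a']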
@@ -2965,7 +2968,7 @@ class gPodder(BuilderWidget, dbus.service.Object):
             self.show_update_feeds_buttons()

-        util.idle_add(update_feed_cache_finish_callback)
+        util.idle_add(update_feed_cache_finish_callback, new_episodes)

     def on_gPodder_delete_event(self, *args):
         """Called when the GUI wants to close the window

src/gpodder/model.py

@@ -1222,7 +1222,7 @@ class PodcastChannel(PodcastModelObject):
         next_feed = None

         # mark episodes not new
-        real_new_episode_count = 0
+        real_new_episodes = []
         # Search all entries for new episodes
         for episode in new_episodes:
             # Workaround for bug 340: If the episode has been
@@ -1234,17 +1234,18 @@ class PodcastChannel(PodcastModelObject):
                 episode.save()

             if episode.is_new:
-                real_new_episode_count += 1
+                real_new_episodes.append(episode)

             # Only allow a certain number of new episodes per update
             if (self.download_strategy == PodcastChannel.STRATEGY_LATEST
-                    and real_new_episode_count > 1):
+                    and len(real_new_episodes) > 1):
                 episode.is_new = False
                 episode.save()

         self.children.extend(new_episodes)

         self.remove_unreachable_episodes(existing, seen_guids, max_episodes)
+        return real_new_episodes

     def remove_unreachable_episodes(self, existing, seen_guids, max_episodes):
         # Remove "unreachable" episodes - episodes that have not been
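Note that under STRATEGY_LATEST an episode can be appended to real_new_episodes and then immediately demoted (is_new = False), so the returned list may contain episodes that no longer report themselves as new; the caller's check_is_new() filter also screens these out. A self-contained sketch of the limiting behaviour, with dicts standing in for episode objects:

    STRATEGY_DEFAULT, STRATEGY_LATEST = range(2)

    def consume(episodes, download_strategy):
        real_new_episodes = []
        for episode in episodes:
            if episode['is_new']:
                real_new_episodes.append(episode)
            # Only allow one new episode per update under STRATEGY_LATEST
            if (download_strategy == STRATEGY_LATEST
                    and len(real_new_episodes) > 1):
                episode['is_new'] = False
        return real_new_episodes

    eps = [{'id': i, 'is_new': True} for i in range(3)]
    kept = consume(eps, STRATEGY_LATEST)
    # All three come back, but only the first is still flagged new.
    assert [e['is_new'] for e in kept] == [True, False, False]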
@@ -1276,11 +1277,12 @@ class PodcastChannel(PodcastModelObject):
     def update(self, max_episodes=0):
         max_episodes = int(max_episodes)
+        new_episodes = []
         try:
             result = self.feed_fetcher.fetch_channel(self, max_episodes)
             if result.status == feedcore.UPDATED_FEED:
-                self._consume_updated_feed(result.feed, max_episodes)
+                new_episodes = self._consume_updated_feed(result.feed, max_episodes)
             elif result.status == feedcore.NEW_LOCATION:
                 # FIXME: could return the feed because in autodiscovery it is parsed already
                 url = result.feed
@@ -1290,7 +1292,7 @@ class PodcastChannel(PodcastModelObject):
                 self.url = url
                 # With the updated URL, fetch the feed again
                 self.update(max_episodes)
-                return
+                return new_episodes

             elif result.status == feedcore.NOT_MODIFIED:
                 pass
@@ -1317,6 +1319,7 @@ class PodcastChannel(PodcastModelObject):
         self._determine_common_prefix()

         self.db.commit()
+        return new_episodes

     def delete(self):
         self.db.delete_podcast(self)
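After these changes PodcastChannel.update() has a simple contract: it returns the list of episodes this update added (empty when the feed was not modified). A hedged usage sketch, assuming channel is an already-loaded PodcastChannel:

    added = channel.update(max_episodes=100)
    for episode in added:
        print('new episode:', episode.title)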