Merge pull request #5899 from pymedusa/release/release-0.2.14
Release/release 0.2.14
p0psicles authored Dec 19, 2018
2 parents 54da0c7 + 2c5dfcd commit 918cfe7
Showing 54 changed files with 800 additions and 332 deletions.
1 change: 1 addition & 0 deletions .travis.yml
@@ -7,6 +7,7 @@ cache:
- node_modules
before_install:
- python .github/check_version.py
- sudo rm -f /etc/boto.cfg
install: false
script: false
jobs:
23 changes: 23 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,26 @@
## 0.2.14 (2018-12-19)

#### New Features
- Added provider nordicbits ([#5854](https://github.com/pymedusa/Medusa/pull/5854))

#### Improvements
- Change the way we calculate and check the daily search interval for providers ([#5855](https://github.com/pymedusa/Medusa/issues/5855))
- During a backlog search, we searched for "any" cache result and, if one was found, didn't attempt to pull new results from the provider. Now we search the provider whenever the cache yields no candidates. ([#5816](https://github.com/pymedusa/Medusa/issues/5816))

#### Fixes
- Fixed double absolute numbers for anime shows where thexem sets an absolute number that already exists ([#5801](https://github.com/pymedusa/Medusa/pull/5801))
- Fixed image cache not being properly created from metadata for images other than posters ([#5810](https://github.com/pymedusa/Medusa/pull/5810))
- Fixed episode status comparison in subtitleMissedPP ([#5813](https://github.com/pymedusa/Medusa/pull/5813))
- Fixed anidex title parsing ([#5837](https://github.com/pymedusa/Medusa/pull/5837))
- Fixed (restored) the possibility of configuring the default daily search interval ([#5823](https://github.com/pymedusa/Medusa/pull/5823))
- Fixed notifications - Kodi 'always on' config option ([#5871](https://github.com/pymedusa/Medusa/pull/5871))
- Fixed mis-mapped proper search interval config option of 24 hours, and added a 30-minute option ([#5896](https://github.com/pymedusa/Medusa/pull/5896))
- Fixed config - search settings, test nzb client connectivity ([#5897](https://github.com/pymedusa/Medusa/pull/5897))
- Fixed adding an episode to the AniDB MyList on post-processing when enabled ([#5897](https://github.com/pymedusa/Medusa/pull/5897))
- Fixed creating banner and fanart from metadata. Any metadata images in the show's folder other than the poster will now also become visible in Medusa ([#5808](https://github.com/pymedusa/Medusa/pull/5808))

-----

## 0.2.13 (2018-11-21)

#### Improvements
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,5 +1,5 @@
FROM lsiobase/alpine.python:3.8
MAINTAINER bobbysteel
MAINTAINER a10kiloham

# set version label
ARG BUILD_DATE
7 changes: 6 additions & 1 deletion SickBeard.py
@@ -1,9 +1,14 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script for backwards compatibility."""
from __future__ import unicode_literals

import sys

from medusa.__main__ import main

if __name__ == '__main__':
if sys.version_info.major == 3 and sys.version_info.minor < 5:
print('Medusa supports Python 2 from version 2.7.10 and Python 3 from version 3.5.0, exiting!')
raise Exception('Incorrect Python version. Shutting down!')
main()
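
For comparison, the same guard is often written as a single tuple comparison against sys.version_info; a minimal sketch, not part of this diff:

import sys

# sys.version_info compares element-wise as a tuple, so one comparison
# covers the "Python 3 but older than 3.5" case the shim checks for.
if (3, 0) <= sys.version_info < (3, 5):
    raise SystemExit('Medusa needs Python >= 3.5 when running on Python 3')
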
28 changes: 27 additions & 1 deletion medusa/__main__.py
@@ -425,6 +425,7 @@ def initialize(self, console_logging=True):
app.GIT_PASSWORD = check_setting_str(app.CFG, 'General', 'git_password', '', censor_log='low')
app.GIT_TOKEN = check_setting_str(app.CFG, 'General', 'git_token', '', censor_log='low', encrypted=True)
app.DEVELOPER = bool(check_setting_int(app.CFG, 'General', 'developer', 0))
app.PYTHON_VERSION = check_setting_list(app.CFG, 'General', 'python_version', [], transform=int)

# debugging
app.DEBUG = bool(check_setting_int(app.CFG, 'General', 'debug', 0))
@@ -1077,9 +1078,12 @@ def initialize(self, console_logging=True):
# Disable flag to erase cache
app.SUBTITLES_ERASE_CACHE = False

# Check if we start with a different Python version since last start
python_version_changed = self.migrate_python_version()

# Check if we need to perform a restore of the cache folder
Application.restore_cache_folder(app.CACHE_DIR)
cache.configure(app.CACHE_DIR)
cache.configure(app.CACHE_DIR, replace=python_version_changed)

# Rebuild the censored list
app_logger.rebuild_censored_list()
@@ -1247,6 +1251,27 @@ def path_leaf(path):
folder_path = os.path.join(cache_folder, name)
helpers.remove_folder(folder_path)

@staticmethod
def migrate_python_version():
"""
Perform some cleanups in case we switch between major Python versions.
It's possible to switch from Python version 2 to 3 or vice versa.
In that case we might wanna run some sanity actions, to make sure everything keeps working.
:return: True if the major Python version has changed since last start
:return type: Boolean
"""
# TODO: Leaving this here as a marking for when we merge the python3 changes.
current_version = app.PYTHON_VERSION
app.PYTHON_VERSION = list(sys.version_info)[:3]

# Run some sanitation when switching between Python versions
if current_version and current_version[0] != app.PYTHON_VERSION[0]:
return True

return False

@staticmethod
def start_threads():
"""Start application threads."""
@@ -1545,6 +1570,7 @@ def save_config():
new_config['General']['calendar_icons'] = int(app.CALENDAR_ICONS)
new_config['General']['no_restart'] = int(app.NO_RESTART)
new_config['General']['developer'] = int(app.DEVELOPER)
new_config['General']['python_version'] = app.PYTHON_VERSION
new_config['General']['display_all_seasons'] = int(app.DISPLAY_ALL_SEASONS)
new_config['General']['news_last_read'] = app.NEWS_LAST_READ
new_config['General']['broken_providers'] = helpers.get_broken_providers() or app.BROKEN_PROVIDERS
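A standalone sketch of the check migrate_python_version performs, assuming a plain list stands in for the persisted python_version setting:

import sys

def python_major_changed(stored_version):
    """Return (changed, current); changed is True when the stored major
    version differs from the running interpreter's major version."""
    current = list(sys.version_info[:3])
    # An empty stored version (first start) never counts as a change.
    changed = bool(stored_version) and stored_version[0] != current[0]
    return changed, current

print(python_major_changed([]))          # (False, [3, x, y]) on first start
print(python_major_changed([2, 7, 15]))  # (True, ...) when now on Python 3
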
1 change: 1 addition & 0 deletions medusa/app.py
@@ -52,6 +52,7 @@ def __init__(self):
# static configuration
self.LOCALE = None, None
self.OS_USER = None
self.PYTHON_VERSION = []
self.OPENSSL_VERSION = None
self.APP_VERSION = None
self.MAJOR_DB_VERSION = None
10 changes: 5 additions & 5 deletions medusa/cache.py
@@ -47,34 +47,34 @@ def release_write_lock(self):
anidb_cache = make_region()


def configure(cache_dir):
def configure(cache_dir, replace=False):
"""Configure caches."""
# memory cache
from subliminal.cache import region as subliminal_cache

memory_cache.configure('dogpile.cache.memory', expiration_time=timedelta(hours=1))

# subliminal cache
subliminal_cache.configure('dogpile.cache.dbm',
subliminal_cache.configure('dogpile.cache.dbm', replace_existing_backend=replace,
expiration_time=timedelta(days=30),
arguments={
'filename': os.path.join(cache_dir, 'subliminal.dbm'),
'lock_factory': MutexLock})

# application cache
cache.configure('dogpile.cache.dbm',
cache.configure('dogpile.cache.dbm', replace_existing_backend=replace,
expiration_time=timedelta(days=1),
arguments={'filename': os.path.join(cache_dir, 'application.dbm'),
'lock_factory': MutexLock})

# recommended series cache
recommended_series_cache.configure('dogpile.cache.dbm',
recommended_series_cache.configure('dogpile.cache.dbm', replace_existing_backend=replace,
expiration_time=timedelta(days=7),
arguments={'filename': os.path.join(cache_dir, 'recommended.dbm'),
'lock_factory': MutexLock})

# anidb (adba) series cache
anidb_cache.configure('dogpile.cache.dbm',
anidb_cache.configure('dogpile.cache.dbm', replace_existing_backend=replace,
expiration_time=timedelta(days=3),
arguments={'filename': os.path.join(cache_dir, 'anidb.dbm'),
'lock_factory': MutexLock})
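A minimal sketch of what replace_existing_backend does for a dogpile.cache region (the filename here is made up):

from datetime import timedelta

from dogpile.cache import make_region

region = make_region()
region.configure(
    'dogpile.cache.dbm',
    expiration_time=timedelta(days=1),
    arguments={'filename': '/tmp/example.dbm'},
)
# Configuring an already-configured region normally raises an exception;
# with replace_existing_backend=True the new backend replaces the old one,
# which is what lets the caches be rebuilt after a Python version switch.
region.configure(
    'dogpile.cache.dbm',
    expiration_time=timedelta(days=1),
    arguments={'filename': '/tmp/example.dbm'},
    replace_existing_backend=True,
)
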
2 changes: 1 addition & 1 deletion medusa/common.py
@@ -39,7 +39,7 @@
long = int

INSTANCE_ID = str(uuid.uuid1())
VERSION = '0.2.13'
VERSION = '0.2.14'
USER_AGENT = 'Medusa/{version} ({system}; {release}; {instance})'.format(
version=VERSION, system=platform.system(), release=platform.release(),
instance=INSTANCE_ID)
4 changes: 2 additions & 2 deletions medusa/helpers/__init__.py
@@ -404,7 +404,7 @@ def move_and_symlink_file(src_file, dest_file):
u'Failed to create symlink of {source} at {destination}.'
u' Error: {error!r}', {
'source': src_file,
'dest': dest_file,
'destination': dest_file,
'error': msg,
}
)
@@ -413,7 +413,7 @@ def move_and_symlink_file(src_file, dest_file):
u'Failed to create symlink of {source} at {destination}.'
u' Error: {error!r}. Copying instead', {
'source': src_file,
'dest': dest_file,
'destination': dest_file,
'error': msg,
}
)
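The 'dest' to 'destination' rename matters because brace-style placeholders are looked up by name. A quick illustration with plain str.format (the BraceAdapter defers formatting, but a mismatched key surfaces the same way):

msg = u'Failed to create symlink of {source} at {destination}.'

print(msg.format(source='/a/file', destination='/b/file'))  # fine
try:
    msg.format(source='/a/file', dest='/b/file')
except KeyError as error:
    # -> missing placeholder key: KeyError('destination')
    print('missing placeholder key: {0!r}'.format(error))
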
4 changes: 2 additions & 2 deletions medusa/image_cache.py
@@ -301,8 +301,8 @@ def fill_cache(series_obj):
log.debug('Checking {provider.name} metadata for {img}',
{'provider': provider, 'img': IMAGE_TYPES[img_type]})

if os.path.isfile(provider.get_poster_path(series_obj)):
path = provider.get_poster_path(series_obj)
path = provider.get_image_path(series_obj, img_type)
if os.path.isfile(path):
filename = os.path.abspath(path)
file_type = which_type(filename)

16 changes: 16 additions & 0 deletions medusa/metadata/generic.py
@@ -35,6 +35,12 @@
log = BraceAdapter(logging.getLogger(__name__))
log.logger.addHandler(logging.NullHandler())

BANNER = 1
POSTER = 2
BANNER_THUMB = 3
POSTER_THUMB = 4
FANART = 5


class GenericMetadata(object):
"""
@@ -173,6 +179,16 @@ def get_poster_path(self, show_obj):
def get_banner_path(self, show_obj):
return os.path.join(show_obj.location, self.banner_name)

def get_image_path(self, show_obj, image_type):
"""Based on the image_type (banner, poster, fanart) call the correct method, and return the path."""
banner_path = {
BANNER: self.get_banner_path,
POSTER: self.get_poster_path,
FANART: self.get_fanart_path
}
if banner_path.get(image_type):
return banner_path[image_type](show_obj)

@staticmethod
def get_episode_thumb_path(ep_obj):
"""
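A self-contained sketch of the dispatch-dict pattern get_image_path uses (the constants mirror the diff above; the filenames are assumptions):

BANNER, POSTER, FANART = 1, 2, 5

def get_image_path(location, image_type):
    """Map an image type to the matching path builder, or None."""
    builders = {
        BANNER: lambda: location + '/banner.jpg',
        POSTER: lambda: location + '/poster.jpg',
        FANART: lambda: location + '/fanart.jpg',
    }
    builder = builders.get(image_type)
    # Thumb types have no entry, so the lookup falls through to None --
    # callers are expected to check the result before using it.
    return builder() if builder else None

print(get_image_path('/shows/Example', FANART))  # /shows/Example/fanart.jpg
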
4 changes: 4 additions & 0 deletions medusa/name_parser/rules/rules.py
@@ -397,6 +397,10 @@ def when(self, matches, context):
:type context: dict
:return:
"""
# Don't add an additional alias if we already have one from the previous rules
if matches.named('alias'):
return

fileparts = matches.markers.named('path')
for filepart in marker_sorted(fileparts, matches):
title = matches.range(filepart.start, filepart.end, predicate=lambda match: match.name == 'title', index=0)
8 changes: 4 additions & 4 deletions medusa/notifiers/trakt.py
@@ -92,7 +92,7 @@ def update_library(ep_obj):
trakt_api.request('sync/collection', data, method='POST')

except (TokenExpiredException, TraktException, AuthException) as error:
log.debug('Unable to update Trakt: {0}', error.message)
log.debug('Unable to update Trakt: {0!r}', error)

@staticmethod
def update_watchlist(show_obj=None, s=None, e=None, data_show=None, data_episode=None, update='add'):
@@ -176,7 +176,7 @@ def update_watchlist(show_obj=None, s=None, e=None, data_show=None, data_episode
trakt_api.request(trakt_url, data, method='POST')

except (TokenExpiredException, TraktException, AuthException) as error:
log.debug('Unable to update Trakt watchlist: {0}', error.message)
log.debug('Unable to update Trakt watchlist: {0!r}', error)
return False

return True
@@ -244,5 +244,5 @@ def test_notify(username, blacklist_name=None):
else:
return 'Test notice sent successfully to Trakt'
except (TokenExpiredException, TraktException, AuthException) as error:
log.warning('Unable to test TRAKT: {0}', error.message)
return 'Test notice failed to Trakt: {0}'.format(error.message)
log.warning('Unable to test TRAKT: {0!r}', error)
return 'Test notice failed to Trakt: {0!r}'.format(error)
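
The switch from error.message to {0!r} is a Python 3 compatibility fix: BaseException.message is gone in Python 3, while formatting the exception itself always works. A minimal illustration:

try:
    raise ValueError('token expired')
except ValueError as error:
    # error.message would raise AttributeError on Python 3.
    print('Unable to update Trakt: {0!r}'.format(error))
    # e.g. -> Unable to update Trakt: ValueError('token expired')
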
2 changes: 1 addition & 1 deletion medusa/post_processor.py
@@ -550,7 +550,7 @@ def _add_to_anidb_mylist(self, file_path):

self.log(u'Adding the file to the anidb mylist', logger.DEBUG)
try:
self.anidbEpisode.add_to_mylist(status=1) # status = 1 sets the status of the file to "internal HDD"
self.anidbEpisode.add_to_mylist(state=1) # state = 1 sets the state of the file to "internal HDD"
except Exception as e:
self.log(u'Exception message: {0!r}'.format(e))

3 changes: 2 additions & 1 deletion medusa/providers/__init__.py
@@ -40,6 +40,7 @@
nebulance,
newpct,
norbits,
nordicbits,
nyaa,
pretome,
privatehd,
@@ -69,7 +70,7 @@
'speedcd', 'nyaa', 'torrentbytes', 'torrent9', 'morethantv', 'tokyotoshokan', 'iptorrents',
'hebits', 'alpharatio', 'sdbits', 'shazbat', 'rarbg', 'tntvillage', 'binsearch', 'xthor',
'abnormal', 'scenetime', 'nebulance', 'tvchaosuk', 'bitcannon', 'torrentz2', 'pretome', 'anizb',
'hdspace', 'newpct', 'danishbits', 'limetorrents', 'norbits', 'bithdtv',
'hdspace', 'newpct', 'nordicbits', 'danishbits', 'limetorrents', 'norbits', 'bithdtv',
'zooqle', 'animebytes', 'animetorrents', 'horriblesubs', 'anidex', 'shanaproject', 'torrenting',
'yggtorrent', 'elitetracker', 'archetorrent', 'privatehd', 'cinemaz', 'avistaz', 'bjshare', 'btdb'
]
39 changes: 26 additions & 13 deletions medusa/providers/generic_provider.py
@@ -238,9 +238,33 @@ def remove_duplicate_mappings(items, pk='link'):
))
)

def search_results_in_cache(self, episodes):
"""
Search episodes based on param in cache.
Search the cache (db) for this provider
:param episodes: List of Episode objects
:return: A dict of search results, ordered by episode number
"""
return self.cache.find_episodes(episodes)

def find_search_results(self, series, episodes, search_mode, forced_search=False, download_current_quality=False,
manual_search=False, manual_search_type='episode'):
"""Search episodes based on param."""
"""
Search episodes based on param.
Search the provider using http queries.
:param series: Series object
:param episodes: List of Episode objects
:param search_mode: 'eponly' or 'sponly'
:param forced_search: Flag if the search was triggered by a forced search
:param download_current_quality: Flag if we want to include an already downloaded quality in the new search
:param manual_search: Flag if the search was triggered by a manual search
:param manual_search_type: How the manual search was started: For example an 'episode' or 'season'
:return: A dict of search results, ordered by episode number.
"""
self._check_auth()
self.series = series

@@ -249,18 +273,6 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'

for episode in episodes:
if not manual_search:
cache_results = self.cache.find_needed_episodes(
episode, forced_search=forced_search, down_cur_quality=download_current_quality
)
if cache_results:
for episode_no in cache_results:
if episode_no not in results:
results[episode_no] = cache_results[episode_no]
else:
results[episode_no] += cache_results[episode_no]
continue

search_strings = []
if season_search:
search_strings = self._get_season_search_strings(episode)
@@ -471,6 +483,7 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
', '.join(map(str, search_result.parsed_result.episode_numbers)),
search_result.name,
search_result.url)

if episode_number not in results:
results[episode_number] = [search_result]
else:
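A hedged, self-contained sketch of the new split between cache and provider searches (the real call site lives in Medusa's search code, not in this hunk; FakeProvider is a stand-in with simplified signatures):

class FakeProvider(object):
    """Stand-in exposing the two methods from the diff above."""

    def __init__(self, cached):
        self.cached = cached

    def search_results_in_cache(self, episodes):
        return {ep: hits for ep, hits in self.cached.items() if ep in episodes}

    def find_search_results(self, episodes):
        return {ep: ['fresh result'] for ep in episodes}

def search(provider, episodes):
    # Only hit the provider over HTTP when the cache had no candidates.
    results = provider.search_results_in_cache(episodes)
    if not results:
        results = provider.find_search_results(episodes)
    return results

print(search(FakeProvider({}), [1, 2]))          # provider is queried
print(search(FakeProvider({1: ['hit']}), [1]))   # served from cache
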
4 changes: 2 additions & 2 deletions medusa/providers/nzb/anizb.py
@@ -8,7 +8,7 @@

from medusa import tv
from medusa.bs4_parser import BS4Parser
from medusa.helper.common import try_int
from medusa.helper.common import convert_size, try_int
from medusa.logger.adapters.style import BraceAdapter
from medusa.providers.nzb.nzb_provider import NZBProvider

Expand Down Expand Up @@ -83,7 +83,7 @@ def search(self, search_strings, age=0, ep_obj=None, **kwargs):
continue

# description = item.find('description')
size = try_int(item.enclosure.get('length', -1))
size = convert_size(item.enclosure.get('length'), default=-1)

item = {
'title': title,
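Why swap try_int for convert_size here: the enclosure length may be missing or not a plain integer, and convert_size degrades to its default. The helper below is a hypothetical stand-in, not the real medusa.helper.common.convert_size:

def parse_size(value, default=-1):
    """Hypothetical stand-in mirroring the convert_size(value, default=-1) call."""
    try:
        return int(float(value))
    except (TypeError, ValueError):
        return default

print(parse_size('367001600'))  # 367001600
print(parse_size(None))         # -1
print(parse_size('n/a'))        # -1
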
5 changes: 3 additions & 2 deletions medusa/providers/torrent/__init__.py
@@ -24,6 +24,7 @@
morethantv,
nebulance,
newpct,
nordicbits,
pretome,
privatehd,
scenetime,
@@ -66,8 +67,8 @@

__all__ = [
'abnormal', 'alpharatio', 'animebytes', 'archetorrent', 'bithdtv', 'torrent9', 'danishbits',
'hdspace', 'hdtorrents', 'iptorrents', 'limetorrents', 'morethantv', 'torznab',
'newpct', 'pretome', 'sdbits', 'scenetime', 'speedcd', 'thepiratebay', 'tntvillage', 'tokyotoshokan',
'hdspace', 'hdtorrents', 'iptorrents', 'limetorrents', 'morethantv', 'torznab', 'newpct', 'nordicbits',
'pretome', 'sdbits', 'scenetime', 'speedcd', 'thepiratebay', 'tntvillage', 'tokyotoshokan',
'torrentbytes', 'torrentleech', 'nebulance', 'tvchaosuk', 'xthor', 'zooqle', 'bitcannon', 'btn',
'hdbits', 'norbits', 'rarbg', 'torrentday', 'nyaa', 'rsstorrent', 'shazbat', 'hebits',
'torrentz2', 'animetorrents', 'horriblesubs', 'anidex', 'shanaproject', 'torrenting', 'yggtorrent',