Skip to content

Commit

Permalink
[Temporary] Merge-Squash of PR 6807
Browse files Browse the repository at this point in the history
  • Loading branch information
sharkykh committed Jun 10, 2019
1 parent b35eaef commit 89e1ffc
Show file tree
Hide file tree
Showing 14 changed files with 477 additions and 100 deletions.
5 changes: 3 additions & 2 deletions dredd/api-description.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2565,11 +2565,12 @@ parameters:
description: The configuration to retrieve
type: string
enum:
- main
- main # Keep main first, as the tests use it
- consts
- metadata
- search
- notifiers
- search
- system
log-level:
name: level
in: query
Expand Down
32 changes: 32 additions & 0 deletions medusa/helpers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,16 @@
except ImportError:
reflink = None

try:
from psutil import Process
memory_usage_tool = 'psutil'
except ImportError:
try:
import resource # resource module is unix only
memory_usage_tool = 'resource'
except ImportError:
memory_usage_tool = None


def indent_xml(elem, level=0):
"""Do our pretty printing and make Matt very happy."""
Expand Down Expand Up @@ -1450,6 +1460,28 @@ def get_disk_space_usage(disk_path=None, pretty=True):
return False


def memory_usage(pretty=True):
    """
    Get the current memory usage (if possible).

    Relies on the module-level probe that set ``memory_usage_tool`` to
    either ``'psutil'`` or ``'resource'`` at import time.

    :param pretty: True for human readable size, False for bytes
    :return: Current memory usage
    """
    if memory_usage_tool == 'resource':
        raw = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    elif memory_usage_tool == 'psutil':
        raw = Process(os.getpid()).memory_info().rss
    else:
        # Neither psutil nor resource is available on this platform.
        return ''

    # NOTE(review): `resource` reports ru_maxrss in kilobytes on Linux but
    # bytes on macOS, while psutil reports bytes -- confirm upstream intent.
    return pretty_file_size(raw) if pretty else raw


def get_tvdb_from_id(indexer_id, indexer):

session = MedusaSafeSession()
Expand Down
18 changes: 17 additions & 1 deletion medusa/helpers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,8 @@ def strtobool(val):


def to_timestamp(dt):
"""Return POSIX timestamp corresponding to the datetime instance.
"""
Return POSIX timestamp corresponding to the datetime instance.
:param dt: datetime (possibly aware)
:return: seconds since epoch as float
Expand All @@ -102,3 +103,18 @@ def to_camel_case(snake_str):
"""Convert a snake formatted string to camel case."""
components = snake_str.split('_')
return components[0] + ''.join(x.title() for x in components[1:])


def timedelta_in_milliseconds(td):
    """
    Return the value of the timedelta object in milliseconds.

    :param td: timedelta
    :type td: timedelta
    :return: the value of the timedelta in milliseconds (0 for a falsy input)
    :rtype: int
    """
    if td:
        # total_seconds() yields a float; truncate toward zero like int().
        return int(td.total_seconds() * 1000)
    return 0
16 changes: 16 additions & 0 deletions medusa/server/api/v2/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
common,
config,
db,
helpers,
logger,
ws,
)
Expand All @@ -33,6 +34,10 @@
iter_nested_items,
set_nested_value,
)
from medusa.system.schedulers import (
generate_schedulers,
generate_show_queue,
)

from six import iteritems, itervalues, text_type
from six.moves import map
Expand Down Expand Up @@ -914,6 +919,17 @@ def data_notifiers():

return section_data

@staticmethod
def data_system():
    """System information.

    :return: dict with current memory usage plus scheduler and
        show-queue snapshots (helpers defined elsewhere in the project)
    """
    return {
        'memoryUsage': helpers.memory_usage(pretty=True),
        'schedulers': generate_schedulers(),
        'showQueue': generate_show_queue(),
    }

@staticmethod
def data_clients():
"""Notifications."""
Expand Down
178 changes: 95 additions & 83 deletions medusa/server/api/v2/stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,9 @@
WANTED
)
from medusa.server.api.v2.base import BaseRequestHandler
from medusa.show.show import Show

from six.moves import map


class StatsHandler(BaseRequestHandler):
Expand All @@ -28,93 +31,102 @@ class StatsHandler(BaseRequestHandler):
#: identifier
identifier = ('identifier', r'\w+')
#: path param
path_param = ('path_param', r'\w+')
path_param = None
#: allowed HTTP methods
allowed_methods = ('GET', )

def get(self, identifier, path_param=None):
def get(self, identifier):
"""Query statistics.
:param identifier:
:param path_param:
:type path_param: str
:param identifier: The type of statistics to query
:type identifier: str
"""
pre_today = [SKIPPED, WANTED, FAILED]
snatched = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]
downloaded = [DOWNLOADED, ARCHIVED]

def query_in(items):
return '({0})'.format(','.join(map(str, items)))

query = dedent("""\
SELECT indexer AS indexerId, showid AS seriesId,
SUM(
season > 0 AND
episode > 0 AND
airdate > 1 AND
status IN {status_quality}
) AS epSnatched,
SUM(
season > 0 AND
episode > 0 AND
if not identifier or identifier == 'overall':
data = overall_stats()
elif identifier == 'show':
data = per_show_stats()
else:
return self._not_found('Statistics not found')

return self._ok(data=data)


def overall_stats():
    """Generate overall library statistics.

    Thin wrapper delegating to ``Show.overall_stats()``; the shape of the
    returned data is determined by that method (defined elsewhere).

    :return: aggregate statistics for the whole library
    """
    return Show.overall_stats()


def per_show_stats():
"""Generate per-show library statistics."""
pre_today = [SKIPPED, WANTED, FAILED]
snatched = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]
downloaded = [DOWNLOADED, ARCHIVED]

def query_in(items):
return '({0})'.format(','.join(map(str, items)))

query = dedent("""\
SELECT indexer AS indexerId, showid AS seriesId,
SUM(
season > 0 AND
episode > 0 AND
airdate > 1 AND
status IN {status_quality}
) AS epSnatched,
SUM(
season > 0 AND
episode > 0 AND
airdate > 1 AND
status IN {status_download}
) AS epDownloaded,
SUM(
season > 0 AND
episode > 0 AND
airdate > 1 AND (
(airdate <= {today} AND status IN {status_pre_today}) OR
status IN {status_both}
)
) AS epTotal,
(SELECT airdate FROM tv_episodes
WHERE showid=tv_eps.showid AND
indexer=tv_eps.indexer AND
airdate >= {today} AND
(status = {unaired} OR status = {wanted})
ORDER BY airdate ASC
LIMIT 1
) AS epAirsNext,
(SELECT airdate FROM tv_episodes
WHERE showid=tv_eps.showid AND
indexer=tv_eps.indexer AND
airdate > 1 AND
status IN {status_download}
) AS epDownloaded,
SUM(
season > 0 AND
episode > 0 AND
airdate > 1 AND (
(airdate <= {today} AND status IN {status_pre_today}) OR
status IN {status_both}
)
) AS epTotal,
(SELECT airdate FROM tv_episodes
WHERE showid=tv_eps.showid AND
indexer=tv_eps.indexer AND
airdate >= {today} AND
(status = {unaired} OR status = {wanted})
ORDER BY airdate ASC
LIMIT 1
) AS epAirsNext,
(SELECT airdate FROM tv_episodes
WHERE showid=tv_eps.showid AND
indexer=tv_eps.indexer AND
airdate > 1 AND
status <> {unaired}
ORDER BY airdate DESC
LIMIT 1
) AS epAirsPrev,
SUM(file_size) AS seriesSize
FROM tv_episodes tv_eps
GROUP BY showid, indexer
""").format(
status_quality=query_in(snatched),
status_download=query_in(downloaded),
status_both=query_in(snatched + downloaded),
today=date.today().toordinal(),
status_pre_today=query_in(pre_today),
skipped=SKIPPED,
wanted=WANTED,
unaired=UNAIRED,
)

main_db_con = db.DBConnection()
sql_result = main_db_con.select(query)

stats_data = {}
stats_data['seriesStat'] = list()
stats_data['maxDownloadCount'] = 1000
for cur_result in sql_result:
stats_data['seriesStat'].append(dict(cur_result))
if cur_result['epTotal'] > stats_data['maxDownloadCount']:
stats_data['maxDownloadCount'] = cur_result['epTotal']

stats_data['maxDownloadCount'] *= 100

if identifier is not None:
if identifier not in stats_data:
return self._bad_request('{key} is a invalid path'.format(key=identifier))

stats_data = stats_data[identifier]

return self._ok(data=stats_data)
status <> {unaired}
ORDER BY airdate DESC
LIMIT 1
) AS epAirsPrev,
SUM(file_size) AS seriesSize
FROM tv_episodes tv_eps
GROUP BY showid, indexer
""").format(
status_quality=query_in(snatched),
status_download=query_in(downloaded),
status_both=query_in(snatched + downloaded),
today=date.today().toordinal(),
status_pre_today=query_in(pre_today),
skipped=SKIPPED,
wanted=WANTED,
unaired=UNAIRED,
)

main_db_con = db.DBConnection()
sql_result = main_db_con.select(query)

stats_data = {}
stats_data['seriesStat'] = []
stats_data['maxDownloadCount'] = 1000
for cur_result in sql_result:
stats_data['seriesStat'].append(cur_result)
if cur_result['epTotal'] > stats_data['maxDownloadCount']:
stats_data['maxDownloadCount'] = cur_result['epTotal']

stats_data['maxDownloadCount'] *= 100
return stats_data
Loading

0 comments on commit 89e1ffc

Please sign in to comment.