Merge dev back to master for release. #89

Merged
merged 5 commits on Oct 30, 2017
Changes from all commits
2 changes: 1 addition & 1 deletion MANIFEST.in
@@ -1,3 +1,3 @@
# Include the license and version files.
include LICENSE
include dsub/VERSION
include dsub/providers/local/runner.sh
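
The runner script is added to the manifest because the new dsub/lib/resources.py module (added later in this PR) is designed to read packaged files at run time. A minimal sketch of the assumed call site (the local provider change itself is not part of this diff):

# Hypothetical call site; the actual local provider change is not shown here.
from dsub.lib import resources

# Read the packaged runner script through the new helper (see resources.py below).
runner_sh = resources.get_resource('dsub/providers/local/runner.sh', mode='r')
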
2 changes: 1 addition & 1 deletion dsub/_dsub_version.py
@@ -20,4 +20,4 @@
file could break setup.py.
"""

DSUB_VERSION = '0.1.1'
DSUB_VERSION = '0.1.2'
48 changes: 24 additions & 24 deletions dsub/commands/ddel.py
@@ -16,14 +16,15 @@

Follows the model of qdel.
"""
import argparse
import sys

from ..lib import dsub_util
from ..lib import param_util
from ..lib import resources
from ..providers import provider_base


def parse_arguments():
def _parse_arguments():
"""Parses command line arguments.

Returns:
@@ -32,25 +33,11 @@ def parse_arguments():
# Handle version flag and exit if it was passed.
param_util.handle_version_flag()

provider_required_args = {
'google': ['project'],
'test-fails': [],
'local': [],
}
epilog = 'Provider-required arguments:\n'
for provider in provider_required_args:
epilog += ' %s: %s\n' % (provider, provider_required_args[provider])
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter, epilog=epilog)
provider_base.add_provider_argument(parser)
parser = provider_base.create_parser(sys.argv[0])

parser.add_argument(
'--version', '-v', default=False, help='Print the dsub version and exit.')
google = parser.add_argument_group(
title='google',
description='Options for the Google provider (Pipelines API)')
google.add_argument(
'--project',
help='Cloud project ID in which to find and delete the job(s)')

parser.add_argument(
'--jobs',
'-j',
@@ -82,10 +69,23 @@ def parse_arguments():
default=[],
help='User labels to match. Tasks returned must match all labels.',
metavar='KEY=VALUE')
return parser.parse_args()

# Add provider-specific arguments
google = parser.add_argument_group(
title='google',
description='Options for the Google provider (Pipelines API)')
google.add_argument(
'--project',
help='Cloud project ID in which to find and delete the job(s)')

return provider_base.parse_args(parser, {
'google': ['project'],
'test-fails': [],
'local': [],
}, sys.argv[1:])


def emit_search_criteria(users, jobs, tasks, labels):
def _emit_search_criteria(users, jobs, tasks, labels):
"""Print the filters used to delete tasks. Use raw flags as arguments."""
print 'Delete running jobs:'
print ' user:'
@@ -103,13 +103,13 @@ def emit_search_criteria(users, jobs, tasks, labels):

def main():
# Parse args and validate
args = parse_arguments()
args = _parse_arguments()

# Compute the age filter (if any)
create_time = param_util.age_to_create_time(args.age)

# Set up the Genomics Pipelines service interface
provider = provider_base.get_provider(args)
provider = provider_base.get_provider(args, resources)

# Make sure users were provided, or try to fill from OS user. This cannot
# be made into a default argument since some environments lack the ability
@@ -121,7 +121,7 @@ def main():

# Let the user know which jobs we are going to look up
with dsub_util.replace_print():
emit_search_criteria(user_list, args.jobs, args.tasks, args.label)
_emit_search_criteria(user_list, args.jobs, args.tasks, args.label)
# Delete the requested jobs
deleted_tasks = ddel_tasks(provider, user_list, args.jobs, args.tasks,
labels, create_time)
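
The parser setup and the provider-required-argument check that were previously inlined here (and in dstat.py and dsub.py below) now live in provider_base. The provider_base changes themselves are not part of this diff; reconstructed from the inline logic they replace, the new helpers presumably look roughly like this sketch:

# Sketch only, reconstructed from the removed inline code; not the actual
# provider_base implementation.
import argparse


def create_parser(prog):
  """Build a parser with the flags shared by dsub, dstat, and ddel."""
  parser = argparse.ArgumentParser(
      prog=prog, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
  parser.add_argument(
      '--version', '-v', default=False,
      help='Print the dsub version and exit.')
  # The real code presumably reuses the existing add_provider_argument helper.
  parser.add_argument(
      '--provider', default='google',
      choices=['google', 'local', 'test-fails'],
      help='Job service provider.')
  return parser


def parse_args(parser, provider_required_args, argv):
  """Parse argv and enforce the per-provider required arguments."""
  args = parser.parse_args(argv)
  for arg in provider_required_args[args.provider]:
    if not getattr(args, arg):
      parser.error('argument --%s is required' % arg)
  return args
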
53 changes: 25 additions & 28 deletions dsub/commands/dstat.py
@@ -28,15 +28,16 @@

from __future__ import print_function

import argparse
import collections
from datetime import datetime
import json
import sys
import time
from dateutil.tz import tzlocal

from ..lib import dsub_util
from ..lib import param_util
from ..lib import resources
from ..providers import provider_base

import tabulate
@@ -191,7 +192,7 @@ def print_table(self, table):
print(json.dumps(table, indent=2, default=self.serialize))


def prepare_row(task, full):
def _prepare_row(task, full):
"""return a dict with the task's info (more if "full" is set)."""

# Would like to include the Job ID in the default set of columns, but
@@ -236,7 +237,7 @@ def prepare_row(task, full):
return row


def parse_arguments():
def _parse_arguments():
"""Parses command line arguments.

Returns:
@@ -245,21 +246,11 @@ def parse_arguments():
# Handle version flag and exit if it was passed.
param_util.handle_version_flag()

provider_required_args = {
'google': ['project'],
'test-fails': [],
'local': [],
}
epilog = 'Provider-required arguments:\n'
for provider in provider_required_args:
epilog += ' %s: %s\n' % (provider, provider_required_args[provider])
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter, epilog=epilog)
parser = provider_base.create_parser(sys.argv[0])

parser.add_argument(
'--version', '-v', default=False, help='Print the dsub version and exit.')
parser.add_argument(
'--project',
help='Cloud project ID in which to query pipeline operations')

parser.add_argument(
'--jobs',
'-j',
@@ -289,7 +280,9 @@ def parse_arguments():
default=['RUNNING'],
choices=['RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED', '*'],
help="""Lists only those jobs which match the specified status(es).
Use "*" to list jobs of any status.""")
Choose from {'RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED'}.
Use "*" to list jobs of any status.""",
metavar='STATUS')
parser.add_argument(
'--age',
help="""List only those jobs newer than the specified age. Ages can be
@@ -325,21 +318,25 @@ def parse_arguments():
'--format',
choices=['text', 'json', 'yaml', 'provider-json'],
help='Set the output format.')
# Add provider-specific arguments
provider_base.add_provider_argument(parser)

args = parser.parse_args()
# Add provider-specific arguments
google = parser.add_argument_group(
title='google',
description='Options for the Google provider (Pipelines API)')
google.add_argument(
'--project',
help='Cloud project ID in which to find and delete the job(s)')

# check special flag rules
for arg in provider_required_args[args.provider]:
if not args.__getattribute__(arg):
parser.error('argument --%s is required' % arg)
return args
return provider_base.parse_args(parser, {
'google': ['project'],
'test-fails': [],
'local': [],
}, sys.argv[1:])


def main():
# Parse args and validate
args = parse_arguments()
args = _parse_arguments()

# Compute the age filter (if any)
create_time = param_util.age_to_create_time(args.age)
@@ -362,7 +359,7 @@ def main():
output_formatter = TextOutput(args.full)

# Set up the Genomics Pipelines service interface
provider = provider_base.get_provider(args)
provider = provider_base.get_provider(args, resources)

# Set poll interval to zero if --wait is not set.
poll_interval = args.poll_interval if args.wait else 0
@@ -458,7 +455,7 @@ def dstat_job_producer(provider,
if raw_format:
formatted_tasks.append(task.raw_task_data())
else:
formatted_tasks.append(prepare_row(task, full_output))
formatted_tasks.append(_prepare_row(task, full_output))

# Determine if any of the jobs are running.
if task.get_field('task-status') == 'RUNNING':
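
The --status change above adds metavar='STATUS' and folds the choices into the help text, presumably to keep the usage line short. A standalone argparse illustration of that effect (not dsub code):

# Standalone illustration of the metavar effect; not dsub code.
import argparse

parser = argparse.ArgumentParser(prog='status-example')
parser.add_argument(
    '--status',
    default=['RUNNING'],
    choices=['RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED', '*'],
    metavar='STATUS',
    help='Lists only those jobs which match the specified status(es).')
parser.print_help()
# With metavar, the usage line shows "[--status STATUS]"; without it, argparse
# would print the full "[--status {RUNNING,SUCCESS,FAILURE,CANCELED,*}]".
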
31 changes: 10 additions & 21 deletions dsub/commands/dsub.py
@@ -30,6 +30,7 @@
from ..lib import dsub_util
from ..lib import job_util
from ..lib import param_util
from ..lib import resources
from ..lib.dsub_util import print_error
from ..providers import provider_base

@@ -182,22 +183,12 @@ def _parse_arguments(prog, argv):
# Handle version flag and exit if it was passed.
param_util.handle_version_flag()

provider_required_args = {
'google': ['project', 'zones', 'logging'],
'test-fails': [],
'local': ['logging'],
}
epilog = 'Provider-required arguments:\n'
for provider in provider_required_args:
epilog += ' %s: %s\n' % (provider, provider_required_args[provider])
parser = argparse.ArgumentParser(
prog=prog,
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=epilog)
parser = provider_base.create_parser(prog)

# Add dsub core job submission arguments
parser.add_argument(
'--version', '-v', default=False, help='Print the dsub version and exit.')

parser.add_argument(
'--name',
help="""Name for pipeline. Defaults to the script name or
@@ -331,7 +322,6 @@ def _parse_arguments(prog, argv):
' (either a folder, or file ending in ".log")')

# Add provider-specific arguments
provider_base.add_provider_argument(parser)
google = parser.add_argument_group(
title='google',
description='Options for the Google provider (Pipelines API)')
@@ -362,13 +352,12 @@ def _parse_arguments(prog, argv):
Allows for connecting to the VM for debugging.
Default is 0; maximum allowed value is 86400 (1 day).""")

args = parser.parse_args(argv)

# check special flag rules
for arg in provider_required_args[args.provider]:
if not args.__getattribute__(arg):
parser.error('argument --%s is required' % arg)
return args
return provider_base.parse_args(
parser, {
'google': ['project', 'zones', 'logging'],
'test-fails': [],
'local': ['logging'],
}, argv)


def _get_job_resources(args):
@@ -704,7 +693,7 @@ def run_main(args):
}]

return run(
provider_base.get_provider(args),
provider_base.get_provider(args, resources),
_get_job_resources(args),
job_data,
all_task_data,
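
All three commands now share the same argument-handling flow: provider_base builds the base parser, the command adds its own flags, provider_base.parse_args() enforces the per-provider required arguments, and provider_base.get_provider() constructs the provider with the resources module. A condensed sketch of that flow (the argv value is illustrative only):

# Condensed sketch of the shared flow; the argv value is illustrative.
import sys

from dsub.lib import resources
from dsub.providers import provider_base

parser = provider_base.create_parser(sys.argv[0])
# ... command-specific flags and provider argument groups are added here ...
args = provider_base.parse_args(
    parser,
    {'google': ['project'], 'test-fails': [], 'local': []},
    ['--provider', 'test-fails'])
provider = provider_base.get_provider(args, resources)
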
40 changes: 40 additions & 0 deletions dsub/lib/resources.py
@@ -0,0 +1,40 @@
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Enable dsub methods to access resources in dsub packages.

dsub, dstat, and ddel are designed to run under a few different packaging
environments. This module implements access to resources (such as static
text files) for the setuptools distribution.

This module is imported by dsub.py, dstat.py, and ddel.py and passed down to
other classes that may need it. This module should not otherwise be imported
directly.

This mechanism allows users of dsub.py, dstat.py, and ddel.py to replace the
resources module with their own resource package after import and before
calling main() or other entrypoints.
"""

import os

# The resource root is the root dsub directory.
# For example:
# my_dir/dsub/dsub/lib/resources.py --> my_dir/dsub
_RESOURCE_ROOT = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))))


def get_resource(resource_path, mode='rb'):
with open(os.path.join(_RESOURCE_ROOT, resource_path), mode=mode) as f:
return f.read()
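
Per the docstring, callers that repackage dsub can substitute their own resource loader after import and before calling main(). A hypothetical sketch of that substitution (my_resources is an illustrative stand-in, not a real module; it only needs to expose get_resource(resource_path, mode='rb')):

# Hypothetical substitution; my_resources is an illustrative stand-in.
from dsub.commands import dsub as dsub_command

import my_resources

# Rebind the module-level name that dsub.py imported, then run as usual.
dsub_command.resources = my_resources
# dsub_command.main()
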