De-lint.
Note that splitting the field handling into separate methods is arbitrary, done
to work around pylint's arbitrary 'too-many-branches' opinion.
tseaver committed Aug 12, 2015
1 parent 0b9474b commit 4c14890
Showing 3 changed files with 49 additions and 37 deletions.
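
For readers unfamiliar with the check: pylint's 'too-many-branches' message (R0912) flags any function or method whose branch count exceeds a configurable limit (12 by default). The sketch below is a simplified, hypothetical illustration of the pattern this commit applies, not the actual gcloud code: the per-field 'if' branches move into a helper so that no single method crosses the limit.

# Simplified, hypothetical sketch of the refactoring pattern (not the
# real gcloud classes): move per-field branches into a helper so that
# no single method exceeds pylint's branch limit (R0912, default 12).

class ExampleLoadJob(object):
    """Stand-in for a job object with several optional settings."""

    def __init__(self, allow_jagged_rows=None, encoding=None,
                 source_format=None, write_disposition=None):
        self.allow_jagged_rows = allow_jagged_rows
        self.encoding = encoding
        self.source_format = source_format
        self.write_disposition = write_disposition

    def _populate_config_resource(self, configuration):
        # One branch per optional field; only fields the caller set are
        # copied into the request payload.
        if self.allow_jagged_rows is not None:
            configuration['allowJaggedRows'] = self.allow_jagged_rows
        if self.encoding is not None:
            configuration['encoding'] = self.encoding
        if self.source_format is not None:
            configuration['sourceFormat'] = self.source_format
        if self.write_disposition is not None:
            configuration['writeDisposition'] = self.write_disposition

    def _build_resource(self):
        """Build the request body; the branch-heavy work lives elsewhere."""
        resource = {'configuration': {'load': {}}}
        self._populate_config_resource(resource['configuration']['load'])
        return resource

With this shape, ExampleLoadJob(encoding='UTF-8')._build_resource() returns {'configuration': {'load': {'encoding': 'UTF-8'}}}.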
39 changes: 21 additions & 18 deletions gcloud/bigquery/job.py
@@ -570,24 +570,7 @@ def _require_client(self, client):
             client = self._client
         return client
 
-    def _build_resource(self):
-        """Generate a resource for ``begin``."""
-        resource = {
-            'jobReference': {
-                'projectId': self.project,
-                'jobId': self.name,
-            },
-            'configuration': {
-                'sourceUris': self.source_uris,
-                'destinationTable': {
-                    'projectId': self.destination.project,
-                    'datasetId': self.destination.dataset_name,
-                    'tableId': self.destination.name,
-                },
-                'load': {},
-            },
-        }
-        configuration = resource['configuration']['load']
+    def _populate_config_resource(self, configuration):
 
         if self.allow_jagged_rows is not None:
             configuration['allowJaggedRows'] = self.allow_jagged_rows
@@ -612,6 +595,26 @@ def _build_resource(self):
         if self.write_disposition is not None:
             configuration['writeDisposition'] = self.write_disposition
 
+    def _build_resource(self):
+        """Generate a resource for ``begin``."""
+        resource = {
+            'jobReference': {
+                'projectId': self.project,
+                'jobId': self.name,
+            },
+            'configuration': {
+                'sourceUris': self.source_uris,
+                'destinationTable': {
+                    'projectId': self.destination.project,
+                    'datasetId': self.destination.dataset_name,
+                    'tableId': self.destination.name,
+                },
+                'load': {},
+            },
+        }
+        configuration = resource['configuration']['load']
+        self._populate_config_resource(configuration)
+
         if len(self.schema) > 0:
             configuration['schema'] = {
                 'fields': _build_schema_resource(self.schema)}
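The split above is only one way to quiet the checker; pylint also honors per-scope suppression comments, and the limit itself is configurable (the 'max-branches' setting). The commit message makes clear the project preferred the refactoring. A hypothetical sketch of the suppression alternative:

# Alternative (not what this commit does): keep the long method and
# silence the check for that one scope with an inline pragma.

class ExampleJob(object):

    def _build_resource(self):  # pylint: disable=too-many-branches
        # ... the many per-field 'if' branches would remain here ...
        return {}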
1 change: 1 addition & 0 deletions gcloud/bigquery/table.py
@@ -692,6 +692,7 @@ def insert_data(self,
 
         return errors
 
+
 def _parse_schema_resource(info):
     """Parse a resource fragment into a schema field.
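The table.py change is pure whitespace: the single added line leaves two blank lines between the end of the class body and the module-level _parse_schema_resource, presumably to satisfy the usual PEP 8 spacing for top-level definitions (pycodestyle's E302 when only one blank line is present). A trivial illustration, unrelated to the gcloud code:

# Top-level definitions separated by two blank lines, as PEP 8 expects.


def first():
    return 1


def second():
    return 2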
46 changes: 27 additions & 19 deletions gcloud/bigquery/test_job.py
@@ -109,10 +109,7 @@ def _verifyReadonlyResourceProperties(self, job, resource):
         else:
             self.assertEqual(job.user_email, None)
 
-    def _verifyResourceProperties(self, job, resource):
-        self._verifyReadonlyResourceProperties(job, resource)
-
-        config = resource.get('configuration', {}).get('load')
+    def _verifyBooleanConfigProperties(self, job, config):
         if 'allowJaggedRows' in config:
             self.assertEqual(job.allow_jagged_rows,
                              config['allowJaggedRows'])
@@ -123,6 +120,13 @@ def _verifyResourceProperties(self, job, resource):
                              config['allowQuotedNewlines'])
         else:
             self.assertTrue(job.allow_quoted_newlines is None)
+        if 'ignoreUnknownValues' in config:
+            self.assertEqual(job.ignore_unknown_values,
+                             config['ignoreUnknownValues'])
+        else:
+            self.assertTrue(job.ignore_unknown_values is None)
+
+    def _verifyEnumConfigProperties(self, job, config):
         if 'createDisposition' in config:
             self.assertEqual(job.create_disposition,
                              config['createDisposition'])
@@ -133,16 +137,30 @@ def _verifyResourceProperties(self, job, resource):
                              config['encoding'])
         else:
             self.assertTrue(job.encoding is None)
+        if 'sourceFormat' in config:
+            self.assertEqual(job.source_format,
+                             config['sourceFormat'])
+        else:
+            self.assertTrue(job.source_format is None)
+        if 'writeDisposition' in config:
+            self.assertEqual(job.write_disposition,
+                             config['writeDisposition'])
+        else:
+            self.assertTrue(job.write_disposition is None)
+
+    def _verifyResourceProperties(self, job, resource):
+        self._verifyReadonlyResourceProperties(job, resource)
+
+        config = resource.get('configuration', {}).get('load')
+
+        self._verifyBooleanConfigProperties(job, config)
+        self._verifyEnumConfigProperties(job, config)
+
         if 'fieldDelimiter' in config:
             self.assertEqual(job.field_delimiter,
                              config['fieldDelimiter'])
         else:
             self.assertTrue(job.field_delimiter is None)
-        if 'ignoreUnknownValues' in config:
-            self.assertEqual(job.ignore_unknown_values,
-                             config['ignoreUnknownValues'])
-        else:
-            self.assertTrue(job.ignore_unknown_values is None)
         if 'maxBadRecords' in config:
             self.assertEqual(job.max_bad_records,
                              config['maxBadRecords'])
@@ -158,16 +176,6 @@ def _verifyResourceProperties(self, job, resource):
                              config['skipLeadingRows'])
         else:
             self.assertTrue(job.skip_leading_rows is None)
-        if 'sourceFormat' in config:
-            self.assertEqual(job.source_format,
-                             config['sourceFormat'])
-        else:
-            self.assertTrue(job.source_format is None)
-        if 'writeDisposition' in config:
-            self.assertEqual(job.write_disposition,
-                             config['writeDisposition'])
-        else:
-            self.assertTrue(job.write_disposition is None)
 
     def test_ctor(self):
         client = _Client(self.PROJECT)
