BigQuery DataTransfer: remove unused message exports (via synth). #7263

Merged 2 commits on Apr 12, 2019
@@ -52,9 +52,9 @@
class DataTransferServiceClient(object):
"""
The Google BigQuery Data Transfer Service API enables BigQuery users to
- configure the transfer of their data from other Google Products into BigQuery.
- This service contains methods that are end user exposed. It backs up the
- frontend.
+ configure the transfer of their data from other Google Products into
+ BigQuery. This service contains methods that are end user exposed. It backs
+ up the frontend.
"""

SERVICE_ADDRESS = "bigquerydatatransfer.googleapis.com:443"
@@ -432,7 +432,7 @@ def create_transfer_config(
configuration. This is required if new credentials are needed, as
indicated by ``CheckValidCreds``. In order to obtain
authorization\_code, please make a request to
- https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&redirect\_uri=
+ https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=<data\_source\_scopes>&redirect\_uri=<redirect\_uri>

- client\_id should be OAuth client\_id of BigQuery DTS API for the
given data source returned by ListDataSources method.
@@ -536,7 +536,7 @@ def update_transfer_config(
configuration. If it is provided, the transfer configuration will be
associated with the authorizing user. In order to obtain
authorization\_code, please make a request to
- https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=&redirect\_uri=
+ https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client\_id=&scope=<data\_source\_scopes>&redirect\_uri=<redirect\_uri>

- client\_id should be OAuth client\_id of BigQuery DTS API for the
given data source returned by ListDataSources method.
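The two URL fixes above restore the placeholder names the docstrings refer to. As a rough illustration of the flow those docstrings describe, the consent URL from which an authorization_code is obtained might be assembled as below; the helper and all of its argument values are hypothetical, not part of this client.

```python
# Illustrative helper only (not part of this PR): assembles the consent
# URL shown in the docstrings above. All arguments are placeholders.
from urllib.parse import urlencode


def build_authorization_url(client_id, data_source_scopes, redirect_uri):
    """Return the URL a user visits to obtain an authorization_code."""
    base = "https://www.gstatic.com/bigquerydatatransfer/oauthz/auth"
    query = urlencode(
        {
            # OAuth client_id of the BigQuery DTS API for the data
            # source, as returned by ListDataSources.
            "client_id": client_id,
            "scope": " ".join(data_source_scopes),
            "redirect_uri": redirect_uri,
        }
    )
    return "{}?{}".format(base, query)
```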
@@ -31,36 +31,33 @@ option java_outer_classname = "DataTransferProto";
option java_package = "com.google.cloud.bigquery.datatransfer.v1";
option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";


// The Google BigQuery Data Transfer Service API enables BigQuery users to
- // configure the transfer of their data from other Google Products into BigQuery.
- // This service contains methods that are end user exposed. It backs up the
- // frontend.
+ // configure the transfer of their data from other Google Products into
+ // BigQuery. This service contains methods that are end user exposed. It backs
+ // up the frontend.
service DataTransferService {
// Retrieves a supported data source and returns its settings,
// which can be used for UI rendering.
rpc GetDataSource(GetDataSourceRequest) returns (DataSource) {
option (google.api.http) = {
get: "/v1/{name=projects/*/locations/*/dataSources/*}"
- additional_bindings {
- get: "/v1/{name=projects/*/dataSources/*}"
- }
+ additional_bindings { get: "/v1/{name=projects/*/dataSources/*}" }
};
}

// Lists supported data sources and returns their settings,
// which can be used for UI rendering.
- rpc ListDataSources(ListDataSourcesRequest) returns (ListDataSourcesResponse) {
+ rpc ListDataSources(ListDataSourcesRequest)
+ returns (ListDataSourcesResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*}/dataSources"
- additional_bindings {
- get: "/v1/{parent=projects/*}/dataSources"
- }
+ additional_bindings { get: "/v1/{parent=projects/*}/dataSources" }
};
}

// Creates a new data transfer configuration.
- rpc CreateTransferConfig(CreateTransferConfigRequest) returns (TransferConfig) {
+ rpc CreateTransferConfig(CreateTransferConfigRequest)
+ returns (TransferConfig) {
option (google.api.http) = {
post: "/v1/{parent=projects/*/locations/*}/transferConfigs"
body: "transfer_config"
@@ -73,7 +70,8 @@ service DataTransferService {

// Updates a data transfer configuration.
// All fields must be set, even if they are not updated.
- rpc UpdateTransferConfig(UpdateTransferConfigRequest) returns (TransferConfig) {
+ rpc UpdateTransferConfig(UpdateTransferConfigRequest)
+ returns (TransferConfig) {
option (google.api.http) = {
patch: "/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}"
body: "transfer_config"
@@ -86,40 +84,37 @@ service DataTransferService {

// Deletes a data transfer configuration,
// including any associated transfer runs and logs.
- rpc DeleteTransferConfig(DeleteTransferConfigRequest) returns (google.protobuf.Empty) {
+ rpc DeleteTransferConfig(DeleteTransferConfigRequest)
+ returns (google.protobuf.Empty) {
option (google.api.http) = {
delete: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
- additional_bindings {
- delete: "/v1/{name=projects/*/transferConfigs/*}"
- }
+ additional_bindings { delete: "/v1/{name=projects/*/transferConfigs/*}" }
};
}

// Returns information about a data transfer config.
rpc GetTransferConfig(GetTransferConfigRequest) returns (TransferConfig) {
option (google.api.http) = {
get: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
- additional_bindings {
- get: "/v1/{name=projects/*/transferConfigs/*}"
- }
+ additional_bindings { get: "/v1/{name=projects/*/transferConfigs/*}" }
};
}

// Returns information about all data transfers in the project.
- rpc ListTransferConfigs(ListTransferConfigsRequest) returns (ListTransferConfigsResponse) {
+ rpc ListTransferConfigs(ListTransferConfigsRequest)
+ returns (ListTransferConfigsResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*}/transferConfigs"
- additional_bindings {
- get: "/v1/{parent=projects/*}/transferConfigs"
- }
+ additional_bindings { get: "/v1/{parent=projects/*}/transferConfigs" }
};
}

// Creates transfer runs for a time range [start_time, end_time].
// For each date - or whatever granularity the data source supports - in the
// range, one transfer run is created.
// Note that runs are created per UTC time in the time range.
- rpc ScheduleTransferRuns(ScheduleTransferRunsRequest) returns (ScheduleTransferRunsResponse) {
+ rpc ScheduleTransferRuns(ScheduleTransferRunsRequest)
+ returns (ScheduleTransferRunsResponse) {
option (google.api.http) = {
post: "/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns"
body: "*"
@@ -141,7 +136,8 @@ service DataTransferService {
}

// Deletes the specified transfer run.
- rpc DeleteTransferRun(DeleteTransferRunRequest) returns (google.protobuf.Empty) {
+ rpc DeleteTransferRun(DeleteTransferRunRequest)
+ returns (google.protobuf.Empty) {
option (google.api.http) = {
delete: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
additional_bindings {
@@ -151,7 +147,8 @@ service DataTransferService {
}

// Returns information about running and completed jobs.
- rpc ListTransferRuns(ListTransferRunsRequest) returns (ListTransferRunsResponse) {
+ rpc ListTransferRuns(ListTransferRunsRequest)
+ returns (ListTransferRunsResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs"
additional_bindings {
@@ -161,7 +158,8 @@ service DataTransferService {
}

// Returns user facing log messages for the data transfer run.
- rpc ListTransferLogs(ListTransferLogsRequest) returns (ListTransferLogsResponse) {
+ rpc ListTransferLogs(ListTransferLogsRequest)
+ returns (ListTransferLogsResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs"
additional_bindings {
@@ -176,7 +174,8 @@ service DataTransferService {
// them on behalf of the end user. This API just checks whether we have OAuth
// token for the particular user, which is a pre-requisite before user can
// create a transfer config.
- rpc CheckValidCreds(CheckValidCredsRequest) returns (CheckValidCredsResponse) {
+ rpc CheckValidCreds(CheckValidCredsRequest)
+ returns (CheckValidCredsResponse) {
option (google.api.http) = {
post: "/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds"
body: "*"
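The reflowed RPC signatures above are formatting-only; behavior is unchanged. As a hedged usage sketch of the [start_time, end_time] semantics the ScheduleTransferRuns comment describes, assuming the generated v1 Python client of this era and placeholder project and config IDs:

```python
# Hedged sketch of calling ScheduleTransferRuns via the generated
# client; the project and transfer-config IDs are placeholders.
from google.cloud import bigquery_datatransfer_v1
from google.protobuf import timestamp_pb2

client = bigquery_datatransfer_v1.DataTransferServiceClient()
parent = "projects/my-project/transferConfigs/my-config-id"

# One run per supported granularity, created per UTC time in the range.
start_time = timestamp_pb2.Timestamp(seconds=1554076800)  # 2019-04-01T00:00:00Z
end_time = timestamp_pb2.Timestamp(seconds=1554681600)  # 2019-04-08T00:00:00Z

response = client.schedule_transfer_runs(parent, start_time, end_time)
for run in response.runs:
    print(run.name, run.schedule_time)
```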
@@ -12,9 +12,9 @@

class DataTransferServiceStub(object):
"""The Google BigQuery Data Transfer Service API enables BigQuery users to
- configure the transfer of their data from other Google Products into BigQuery.
- This service contains methods that are end user exposed. It backs up the
- frontend.
+ configure the transfer of their data from other Google Products into
+ BigQuery. This service contains methods that are end user exposed. It backs
+ up the frontend.
"""

def __init__(self, channel):
@@ -92,9 +92,9 @@ def __init__(self, channel):

class DataTransferServiceServicer(object):
"""The Google BigQuery Data Transfer Service API enables BigQuery users to
- configure the transfer of their data from other Google Products into BigQuery.
- This service contains methods that are end user exposed. It backs up the
- frontend.
+ configure the transfer of their data from other Google Products into
+ BigQuery. This service contains methods that are end user exposed. It backs
+ up the frontend.
"""

def GetDataSource(self, request, context):
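For readers unfamiliar with these generated classes: the stub is the low-level gRPC entry point that the hand-written client above wraps. A minimal sketch of driving it directly, where the channel setup is the stock google-auth pattern rather than code from this PR:

```python
# Minimal sketch (not part of this PR) of calling the regenerated stub
# directly over an authorized gRPC channel.
import google.auth
from google.auth.transport.grpc import secure_authorized_channel
from google.auth.transport.requests import Request
from google.cloud.bigquery_datatransfer_v1.proto import (
    datatransfer_pb2,
    datatransfer_pb2_grpc,
)

credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
channel = secure_authorized_channel(
    credentials, Request(), "bigquerydatatransfer.googleapis.com:443"
)
stub = datatransfer_pb2_grpc.DataTransferServiceStub(channel)

request = datatransfer_pb2.ListDataSourcesRequest(parent="projects/my-project")
for source in stub.ListDataSources(request).data_sources:
    print(source.data_source_id)
```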
@@ -29,7 +29,6 @@ option java_package = "com.google.cloud.bigquery.datatransfer.v1";
option objc_class_prefix = "GCBDT";
option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";


// Represents a data transfer configuration. A transfer configuration
// contains all metadata needed to perform a data transfer. For example,
// `destination_dataset_id` specifies where data should be stored.
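The TransferConfig message described in that comment is what most of the RPCs above accept or return. A hedged sketch of constructing one in Python; every field value is a placeholder, and "scheduled_query" is simply one example data source ID:

```python
# Hedged sketch of building the TransferConfig message described above;
# all values are placeholders, not taken from this PR.
from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2
from google.protobuf import struct_pb2

# Data-source-specific parameters travel as a protobuf Struct.
params = struct_pb2.Struct()
params["query"] = "SELECT @run_date AS run_date"

config = transfer_pb2.TransferConfig(
    display_name="My scheduled query",
    data_source_id="scheduled_query",
    destination_dataset_id="my_dataset",  # where transferred data lands
    schedule="every 24 hours",
    params=params,
)
```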
@@ -19,11 +19,9 @@

from google.api_core.protobuf_helpers import get_messages

- from google.api import http_pb2
from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2
from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2
from google.protobuf import any_pb2
- from google.protobuf import descriptor_pb2
from google.protobuf import duration_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
@@ -33,9 +31,7 @@
from google.rpc import status_pb2

_shared_modules = [
- http_pb2,
any_pb2,
- descriptor_pb2,
duration_pb2,
empty_pb2,
field_mask_pb2,
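This file is the substance of the PR: types.py hoists every message class from the listed modules into the public `types` namespace, so dropping `http_pb2` and `descriptor_pb2` from the imports and from `_shared_modules` removes message exports that nothing used. A condensed sketch of that hoisting pattern, simplified here to a single local module:

```python
# Condensed sketch of the export pattern in the module above, reduced to
# one module: get_messages() collects every message class defined in a
# pb2 module, and each class is re-homed onto this package's namespace.
# Removing a module from the lists drops its messages from the surface.
import sys

from google.api_core.protobuf_helpers import get_messages
from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2

_local_modules = [datatransfer_pb2]

names = []
for module in _local_modules:
    for name, message in get_messages(module).items():
        message.__module__ = "google.cloud.bigquery_datatransfer_v1.types"
        setattr(sys.modules[__name__], name, message)
        names.append(name)

__all__ = tuple(sorted(names))
```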
bigquery_datatransfer/noxfile.py: 6 additions & 2 deletions
@@ -45,6 +45,10 @@ def blacken(session):
"""Run black.

Format code to uniform standard.
+
+ This currently uses Python 3.6 due to the automated Kokoro run of synthtool.
+ That run uses an image that doesn't have 3.6 installed. Before updating this
+ check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
"""
session.install("black")
session.run(
@@ -78,7 +82,7 @@ def default(session):
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
"--cov-fail-under=80",
"--cov-fail-under=79",
os.path.join("tests", "unit"),
*session.posargs,
)
@@ -131,6 +135,6 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
session.run("coverage", "report", "--show-missing", "--fail-under=80")
session.run("coverage", "report", "--show-missing", "--fail-under=79")

session.run("coverage", "erase")
bigquery_datatransfer/synth.metadata: 4 additions & 12 deletions
@@ -1,26 +1,18 @@
{
"updateTime": "2019-01-23T22:00:39.365486Z",
"updateTime": "2019-04-12T17:01:24.063249Z",
"sources": [
{
"generator": {
"name": "artman",
"version": "0.16.7",
"dockerImage": "googleapis/artman@sha256:d6c8ced606eb49973ca95d2af7c55a681acc042db0f87d135968349e7bf6dd80"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
"sha": "9aac88a22468b1e291937f55fa1ef237adfdc63e",
"internalRef": "230568136"
"version": "0.16.25",
"dockerImage": "googleapis/artman@sha256:d9597f983d1d4e61272c63cb97b7d8f8234da9999526c35d357de3d781f0ec1b"
}
},
{
"template": {
"name": "python_library",
"origin": "synthtool.gcp",
"version": "2019.1.16"
"version": "2019.4.10"
}
}
],
bigquery_datatransfer/synth.py: 1 addition & 1 deletion
@@ -62,7 +62,7 @@
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
- templated_files = common.py_library(unit_cov_level=80, cov_level=80)
+ templated_files = common.py_library(unit_cov_level=79, cov_level=79)
s.move(templated_files)

s.shell.run(["nox", "-s", "blacken"], hide_output=False)
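For context on "via synth" in the title: synth.py regenerates the library and re-applies the shared templates, which is how the lowered coverage threshold flows into noxfile.py. A condensed sketch of a synth.py of this era; the generator arguments are assumptions, not copied from this PR:

```python
# Condensed sketch of a 2019-era synth.py; generator arguments here are
# assumptions rather than the exact values used by this library.
import synthtool as s
from synthtool import gcp

gapic = gcp.GAPICGenerator()
common = gcp.CommonTemplates()

# Regenerate the client and protos from googleapis via artman.
library = gapic.py_library("bigquery_datatransfer", "v1")
s.move(library)

# Re-apply the shared templates; this is where the coverage threshold
# above lands in the generated noxfile.py.
templated_files = common.py_library(unit_cov_level=79, cov_level=79)
s.move(templated_files)

# Blacken everything so generated code matches the checked-in style.
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
```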