
Commit

docs: rearrange examples to dedicated files
epikhinm committed Jun 6, 2023
1 parent 31b47db commit 1f46be3
Showing 22 changed files with 316 additions and 288 deletions.
29 changes: 29 additions & 0 deletions examples/clickhouse/create.py
@@ -0,0 +1,29 @@
import logging

from google.protobuf.wrappers_pb2 import Int64Value

from doublecloud.clickhouse.v1.cluster_pb2 import ClusterResources
from doublecloud.clickhouse.v1.cluster_service_pb2 import CreateClusterRequest
from doublecloud.clickhouse.v1.cluster_service_pb2_grpc import ClusterServiceStub


def create_cluster(sdk, project_id, region_id, name, network_id):
    cluster_service = sdk.client(ClusterServiceStub)
    operation = cluster_service.Create(
        CreateClusterRequest(
            project_id=project_id,
            cloud_type="aws",
            region_id=region_id,
            name=name,
            resources=ClusterResources(
                clickhouse=ClusterResources.Clickhouse(
                    resource_preset_id="s1-c2-m4",
                    disk_size=Int64Value(value=32 * 2**30),
                    replica_count=Int64Value(value=1),
                )
            ),
            network_id=network_id,
        )
    )
    logging.info("Creating initiated")
    return operation
7 changes: 7 additions & 0 deletions examples/clickhouse/delete.py
@@ -0,0 +1,7 @@
from doublecloud.clickhouse.v1.cluster_service_pb2 import DeleteClusterRequest
from doublecloud.clickhouse.v1.cluster_service_pb2_grpc import ClusterServiceStub


def delete_cluster(sdk, cluster_id):
    cluster_service = sdk.client(ClusterServiceStub)
    return cluster_service.Delete(DeleteClusterRequest(cluster_id=cluster_id))
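
A minimal usage sketch for the two ClickHouse helpers above (illustrative only, not part of this commit). It assumes that `sdk` is an already-initialized DoubleCloud SDK object, as in main.py below, and that the created cluster's id is exposed as `resource_id` on the returned operation once it completes:

# Hypothetical wiring of the create/delete helpers above; not part of this commit.
from create import create_cluster
from delete import delete_cluster


def run_clickhouse_example(sdk, project_id, region_id, name, network_id):
    # Kick off cluster creation; this returns a long-running operation.
    operation = create_cluster(sdk, project_id, region_id, name, network_id)
    # Assumption: after the operation finishes, the new cluster id is
    # exposed as operation.resource_id.
    cluster_id = operation.resource_id
    # Tear the cluster down again once it is no longer needed.
    return delete_cluster(sdk, cluster_id)
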
37 changes: 3 additions & 34 deletions examples/clickhouse/main.py
@@ -3,42 +3,11 @@
import json
import logging

-from google.protobuf.wrappers_pb2 import Int64Value
+# pylint: disable=E0401
+from create import create_cluster
+from delete import delete_cluster

import doublecloud
-from doublecloud.clickhouse.v1.cluster_pb2 import ClusterResources
-from doublecloud.clickhouse.v1.cluster_service_pb2 import (
-    CreateClusterRequest,
-    DeleteClusterRequest,
-)
-from doublecloud.clickhouse.v1.cluster_service_pb2_grpc import ClusterServiceStub
-
-
-def create_cluster(sdk, project_id, region_id, name, network_id):
-    cluster_service = sdk.client(ClusterServiceStub)
-    operation = cluster_service.Create(
-        CreateClusterRequest(
-            project_id=project_id,
-            cloud_type="aws",
-            region_id=region_id,
-            name=name,
-            resources=ClusterResources(
-                clickhouse=ClusterResources.Clickhouse(
-                    resource_preset_id="s1-c2-m4",
-                    disk_size=Int64Value(value=32 * 2**30),
-                    replica_count=Int64Value(value=1),
-                )
-            ),
-            network_id=network_id,
-        )
-    )
-    logging.info("Creating initiated")
-    return operation
-
-
-def delete_cluster(sdk, cluster_id):
-    cluster_service = sdk.client(ClusterServiceStub)
-    return cluster_service.Delete(DeleteClusterRequest(cluster_id=cluster_id))


def main():
30 changes: 30 additions & 0 deletions examples/kafka/create.py
@@ -0,0 +1,30 @@
import logging

from google.protobuf.wrappers_pb2 import Int64Value

from doublecloud.kafka.v1.cluster_pb2 import ClusterResources
from doublecloud.kafka.v1.cluster_service_pb2 import CreateClusterRequest
from doublecloud.kafka.v1.cluster_service_pb2_grpc import ClusterServiceStub


def create_cluster(sdk, project_id, region_id, name, network_id):
    cluster_service = sdk.client(ClusterServiceStub)
    operation = cluster_service.Create(
        CreateClusterRequest(
            project_id=project_id,
            cloud_type="aws",
            region_id=region_id,
            name=name,
            resources=ClusterResources(
                kafka=ClusterResources.Kafka(
                    resource_preset_id="s1-c2-m4",
                    disk_size=Int64Value(value=32 * 2**30),
                    broker_count=Int64Value(value=1),
                    zone_count=Int64Value(value=1),
                )
            ),
            network_id=network_id,
        )
    )
    logging.info("Creating initiated")
    return operation
7 changes: 7 additions & 0 deletions examples/kafka/delete.py
@@ -0,0 +1,7 @@
from doublecloud.kafka.v1.cluster_service_pb2 import DeleteClusterRequest
from doublecloud.kafka.v1.cluster_service_pb2_grpc import ClusterServiceStub


def delete_cluster(sdk, cluster_id):
    cluster_service = sdk.client(ClusterServiceStub)
    return cluster_service.Delete(DeleteClusterRequest(cluster_id=cluster_id))
38 changes: 3 additions & 35 deletions examples/kafka/main.py
@@ -3,43 +3,11 @@
import json
import logging

-from google.protobuf.wrappers_pb2 import Int64Value
+# pylint: disable=E0401
+from create import create_cluster
+from delete import delete_cluster

import doublecloud
-from doublecloud.kafka.v1.cluster_pb2 import ClusterResources
-from doublecloud.kafka.v1.cluster_service_pb2 import (
-    CreateClusterRequest,
-    DeleteClusterRequest,
-)
-from doublecloud.kafka.v1.cluster_service_pb2_grpc import ClusterServiceStub
-
-
-def create_cluster(sdk, project_id, region_id, name, network_id):
-    cluster_service = sdk.client(ClusterServiceStub)
-    operation = cluster_service.Create(
-        CreateClusterRequest(
-            project_id=project_id,
-            cloud_type="aws",
-            region_id=region_id,
-            name=name,
-            resources=ClusterResources(
-                kafka=ClusterResources.Kafka(
-                    resource_preset_id="s1-c2-m4",
-                    disk_size=Int64Value(value=32 * 2**30),
-                    broker_count=Int64Value(value=1),
-                    zone_count=Int64Value(value=1),
-                )
-            ),
-            network_id=network_id,
-        )
-    )
-    logging.info("Creating initiated")
-    return operation
-
-
-def delete_cluster(sdk, cluster_id):
-    cluster_service = sdk.client(ClusterServiceStub)
-    return cluster_service.Delete(DeleteClusterRequest(cluster_id=cluster_id))


def main():
5 changes: 5 additions & 0 deletions examples/transfer/activate.py
@@ -0,0 +1,5 @@
from doublecloud.transfer.v1.transfer_service_pb2 import ActivateTransferRequest


def activate_transfer(svc, transfer_id: str):
    return svc.Activate(ActivateTransferRequest(transfer_id=transfer_id))
10 changes: 10 additions & 0 deletions examples/transfer/create.py
@@ -0,0 +1,10 @@
from doublecloud.transfer.v1.transfer_pb2 import TransferType
from doublecloud.transfer.v1.transfer_service_pb2 import CreateTransferRequest


def create_transfer(svc, project_id: str, name: str, src_id: str, dst_id: str):
    return svc.Create(
        CreateTransferRequest(
            source_id=src_id, target_id=dst_id, name=name, project_id=project_id, type=TransferType.SNAPSHOT_ONLY
        )
    )
5 changes: 5 additions & 0 deletions examples/transfer/deactivate.py
@@ -0,0 +1,5 @@
from doublecloud.transfer.v1.transfer_service_pb2 import DeactivateTransferRequest


def deactivate_transfer(svc, transfer_id: str):
    return svc.Deactivate(DeactivateTransferRequest(transfer_id=transfer_id))
5 changes: 5 additions & 0 deletions examples/transfer/delete.py
@@ -0,0 +1,5 @@
from doublecloud.transfer.v1.transfer_service_pb2 import DeleteTransferRequest


def delete_transfer(svc, transfer_id: str):
    return svc.Delete(DeleteTransferRequest(transfer_id=transfer_id))
55 changes: 55 additions & 0 deletions examples/transfer/endpoints.py
@@ -0,0 +1,55 @@
from doublecloud.transfer.v1.endpoint.airbyte.s3_source_pb2 import S3Source
from doublecloud.transfer.v1.endpoint.clickhouse_pb2 import (
    ClickhouseConnection,
    ClickhouseConnectionOptions,
    ClickhouseTarget,
)
from doublecloud.transfer.v1.endpoint.common_pb2 import Secret
from doublecloud.transfer.v1.endpoint_pb2 import EndpointSettings
from doublecloud.transfer.v1.endpoint_service_pb2 import (
    CreateEndpointRequest,
    DeleteEndpointRequest,
)


def create_s3_src_endpoint(svc, project_id: str, name: str):
    return svc.Create(
        CreateEndpointRequest(
            project_id=project_id,
            name=f"s3-src-{name}",
            settings=EndpointSettings(
                s3_source=S3Source(
                    dataset="test",
                    path_pattern="test",
                    schema="test",
                    format=S3Source.Format(csv=S3Source.Csv()),
                    provider=S3Source.Provider(bucket="test"),
                )
            ),
        )
    )


def create_ch_dst_endpoint(svc, project_id: str, name: str):
    return svc.Create(
        CreateEndpointRequest(
            project_id=project_id,
            name=f"ch-dst-{name}",
            settings=EndpointSettings(
                clickhouse_target=ClickhouseTarget(
                    connection=ClickhouseConnection(
                        connection_options=ClickhouseConnectionOptions(
                            mdb_cluster_id="xoxo",
                            database="default",
                            user="user",
                            password=Secret(raw="98s*%^P!3Bw38"),
                        )
                    )
                )
            ),
        )
    )


def delete_endpoint(svc, endpoint_id: str):
    return svc.Delete(DeleteEndpointRequest(endpoint_id=endpoint_id))
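
A rough sketch of how the transfer helpers above fit together (illustrative only, not part of this commit). The service stubs are the ones imported by main.py below, `sdk` is assumed to be initialized elsewhere, and the created endpoint and transfer ids are assumed to appear as `resource_id` on the returned operations once they complete:

# Hypothetical wiring of the transfer helpers above; not part of this commit.
from activate import activate_transfer
from create import create_transfer
from endpoints import create_ch_dst_endpoint, create_s3_src_endpoint

from doublecloud.transfer.v1.endpoint_service_pb2_grpc import EndpointServiceStub
from doublecloud.transfer.v1.transfer_service_pb2_grpc import TransferServiceStub


def run_transfer_example(sdk, project_id: str, name: str):
    endpoint_svc = sdk.client(EndpointServiceStub)
    transfer_svc = sdk.client(TransferServiceStub)

    # Create a source (S3) and a target (ClickHouse) endpoint.
    src_op = create_s3_src_endpoint(endpoint_svc, project_id, name)
    dst_op = create_ch_dst_endpoint(endpoint_svc, project_id, name)

    # Assumption: the endpoint ids are exposed as resource_id on the
    # returned operations once they complete.
    transfer_op = create_transfer(
        transfer_svc, project_id, name, src_op.resource_id, dst_op.resource_id
    )
    return activate_transfer(transfer_svc, transfer_op.resource_id)
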
89 changes: 7 additions & 82 deletions examples/transfer/main.py
@@ -3,93 +3,18 @@
import json
import logging

+# pylint: disable=E0401
+from activate import activate_transfer
+from create import create_transfer
+from deactivate import deactivate_transfer
+from delete import delete_transfer
+from endpoints import create_ch_dst_endpoint, create_s3_src_endpoint, delete_endpoint

import doublecloud
-from doublecloud.transfer.v1.endpoint.airbyte.s3_source_pb2 import S3Source
-from doublecloud.transfer.v1.endpoint.clickhouse_pb2 import (
-    ClickhouseConnection,
-    ClickhouseConnectionOptions,
-    ClickhouseTarget,
-)
-from doublecloud.transfer.v1.endpoint.common_pb2 import Secret
-from doublecloud.transfer.v1.endpoint_pb2 import EndpointSettings
-from doublecloud.transfer.v1.endpoint_service_pb2 import (
-    CreateEndpointRequest,
-    DeleteEndpointRequest,
-)
-from doublecloud.transfer.v1.endpoint_service_pb2_grpc import EndpointServiceStub
-from doublecloud.transfer.v1.transfer_pb2 import TransferType
-from doublecloud.transfer.v1.transfer_service_pb2 import (
-    ActivateTransferRequest,
-    CreateTransferRequest,
-    DeactivateTransferRequest,
-    DeleteTransferRequest,
-)
-from doublecloud.transfer.v1.transfer_service_pb2_grpc import TransferServiceStub
-
-
-def create_s3_src_endpoint(svc, project_id: str, name: str):
-    return svc.Create(
-        CreateEndpointRequest(
-            project_id=project_id,
-            name=f"s3-src-{name}",
-            settings=EndpointSettings(
-                s3_source=S3Source(
-                    dataset="test",
-                    path_pattern="test",
-                    schema="test",
-                    format=S3Source.Format(csv=S3Source.Csv()),
-                    provider=S3Source.Provider(bucket="test"),
-                )
-            ),
-        )
-    )
-
-
-def delete_endpoint(svc, endpoint_id: str):
-    return svc.Delete(DeleteEndpointRequest(endpoint_id=endpoint_id))
-
-
-def create_ch_dst_endpoint(svc, project_id: str, name: str):
-    return svc.Create(
-        CreateEndpointRequest(
-            project_id=project_id,
-            name=f"ch-dst-{name}",
-            settings=EndpointSettings(
-                clickhouse_target=ClickhouseTarget(
-                    connection=ClickhouseConnection(
-                        connection_options=ClickhouseConnectionOptions(
-                            mdb_cluster_id="xoxo",
-                            database="default",
-                            user="user",
-                            password=Secret(raw="98s*%^P!3Bw38"),
-                        )
-                    )
-                )
-            ),
-        )
-    )
-
-
-def create_transfer(svc, project_id: str, name: str, src_id: str, dst_id: str):
-    return svc.Create(
-        CreateTransferRequest(
-            source_id=src_id, target_id=dst_id, name=name, project_id=project_id, type=TransferType.SNAPSHOT_ONLY
-        )
-    )
-
-
-def activate_transfer(svc, transfer_id: str):
-    return svc.Activate(ActivateTransferRequest(transfer_id=transfer_id))
-
-
-def deactivate_transfer(svc, transfer_id: str):
-    return svc.Deactivate(DeactivateTransferRequest(transfer_id=transfer_id))
-
-
-def delete_transfer(svc, transfer_id: str):
-    return svc.Delete(DeleteTransferRequest(transfer_id=transfer_id))


def main():
logging.basicConfig(level=logging.INFO)
arguments = parse_args()
27 changes: 27 additions & 0 deletions examples/visualization/advise_dataset_fields.py
@@ -0,0 +1,27 @@
from doublecloud.visualization.v1.workbook_pb2 import Dataset
from doublecloud.visualization.v1.workbook_service_pb2 import AdviseDatasetFieldsRequest


def advise_dataset_fields(svc, workbook_id: str, sources: list, connection_name: str):
"""
Function helps to define automatically all fields, their names/IDs and types
based on underlying datasource (table, view, SQL query, etc.).
ID of fields will be equals to column names.
You can use define them manually or use this handler to simplifying for popular cases
"""
dataset = Dataset()
dataset.config.struct_value.update(
{
"fields": [],
"avatars": None,
"sources": sources,
}
)

return svc.AdviseDatasetFields(
AdviseDatasetFieldsRequest(
workbook_id=workbook_id,
connection_name=connection_name,
partial_dataset=dataset,
)
)
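
A brief usage sketch for this helper (illustrative only, not part of this commit). The workbook service stub name is an assumption made by analogy with the other services in these examples, and the shape of `sources` depends on the workbook's dataset configuration:

# Hypothetical usage of advise_dataset_fields; not part of this commit.
from advise_dataset_fields import advise_dataset_fields

# Assumption: the visualization workbook service is exposed via a stub named
# WorkbookServiceStub, following the pattern of the cluster/transfer services.
from doublecloud.visualization.v1.workbook_service_pb2_grpc import WorkbookServiceStub


def suggest_fields(sdk, workbook_id: str, sources: list, connection_name: str):
    svc = sdk.client(WorkbookServiceStub)
    # `sources` is passed through as-is into the partial dataset config;
    # its exact shape depends on the workbook definition.
    return advise_dataset_fields(svc, workbook_id, sources, connection_name)
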
10 changes: 10 additions & 0 deletions examples/visualization/create.py
@@ -0,0 +1,10 @@
from doublecloud.v1.operation_pb2 import Operation
from doublecloud.visualization.v1.workbook_service_pb2 import CreateWorkbookRequest


def create_workbook(svc, project_id: str, name: str) -> Operation:
"""
Function creates an empty workbook
We will fill it with other functions
"""
return svc.Create(CreateWorkbookRequest(project_id=project_id, workbook_title=name))
