🎉 Zendesk Connector: Adds TicketMetricEvents #8168

@@ -634,7 +634,7 @@
- name: Zendesk Support
sourceDefinitionId: 79c1aa37-dae3-42ae-b333-d1c105477715
dockerRepository: airbyte/source-zendesk-support
dockerImageTag: 0.1.6
dockerImageTag: 0.1.7
documentationUrl: https://docs.airbyte.io/integrations/sources/zendesk-support
icon: zendesk.svg
sourceType: api
@@ -6217,7 +6217,7 @@
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
- dockerImage: "airbyte/source-zendesk-support:0.1.6"
- dockerImage: "airbyte/source-zendesk-support:0.1.7"
spec:
documentationUrl: "https://docs.airbyte.io/integrations/sources/zendesk-support"
connectionSpecification:
@@ -25,5 +25,5 @@ COPY source_zendesk_support ./source_zendesk_support
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]

LABEL io.airbyte.version=0.1.6
LABEL io.airbyte.version=0.1.7
LABEL io.airbyte.name=airbyte/source-zendesk-support
@@ -26,6 +26,9 @@
"ticket_metrics": {
"updated_at": "2022-07-19T22:21:26Z"
},
"ticket_metric_events": {
"time": "2022-07-19T22:21:26Z"
},
"macros": {
"updated_at": "2022-12-11T19:34:06Z"
},
@@ -140,6 +140,18 @@
"sync_mode": "incremental",
"destination_sync_mode": "append"
},
{
"stream": {
"name": "ticket_metric_events",
"json_schema": {},
"supported_sync_modes": ["full_refresh", "incremental"],
"source_defined_cursor": true,
"default_cursor_field": ["time"],
"source_defined_primary_key": [["id"]]
},
"sync_mode": "incremental",
"destination_sync_mode": "append"
},
{
"stream": {
"name": "tickets",
@@ -0,0 +1,23 @@
{
"properties": {
"metric": {
"type": ["null", "string"]
},
"id": {
"type": ["null", "integer"]
},
"time": {
"type": ["null", "string"]
},
"instance_id": {
"type": ["null", "integer"]
},
"ticket_id": {
"type": ["null", "integer"]
},
"type": {
"type": ["null", "string"]
}
},
"type": ["null", "object"]
}
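
A record emitted by the new `ticket_metric_events` stream should conform to the schema above. As a quick, hypothetical illustration (the sample values and the use of the third-party `jsonschema` package are assumptions for this sketch, not part of the PR):

```python
# Hypothetical sketch: check a sample ticket_metric_events record against the new schema.
from jsonschema import validate  # third-party package, assumed available

TICKET_METRIC_EVENTS_SCHEMA = {
    "type": ["null", "object"],
    "properties": {
        "id": {"type": ["null", "integer"]},
        "ticket_id": {"type": ["null", "integer"]},
        "instance_id": {"type": ["null", "integer"]},
        "metric": {"type": ["null", "string"]},
        "type": {"type": ["null", "string"]},
        "time": {"type": ["null", "string"]},
    },
}

# Sample values are invented for illustration only.
sample_record = {
    "id": 926256957613,
    "ticket_id": 155,
    "instance_id": 1,
    "metric": "reply_time",
    "type": "measure",
    "time": "2022-07-19T22:21:26Z",
}

validate(instance=sample_record, schema=TICKET_METRIC_EVENTS_SCHEMA)  # raises ValidationError on mismatch
```
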
@@ -24,6 +24,7 @@
TicketFields,
TicketForms,
TicketMetrics,
TicketMetricEvents,
Tickets,
Users,
UserSettingsStream,
@@ -104,6 +105,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
TicketFields(**args),
TicketForms(**args),
TicketMetrics(**args),
TicketMetricEvents(**args),
Tickets(**args),
Users(**args),
]
@@ -77,7 +77,7 @@ def backoff_time(self, response: requests.Response) -> Union[int, float]:
@staticmethod
def str2datetime(str_dt: str) -> datetime:
"""convert string to datetime object
Input example: '2021-07-22T06:55:55Z' FROMAT : "%Y-%m-%dT%H:%M:%SZ"
Input example: '2021-07-22T06:55:55Z' FORMAT : "%Y-%m-%dT%H:%M:%SZ"
"""
if not str_dt:
return None
@@ -86,7 +86,7 @@ def str2datetime(str_dt: str) -> datetime:
@staticmethod
def datetime2str(dt: datetime) -> str:
"""convert datetime object to string
Output example: '2021-07-22T06:55:55Z' FROMAT : "%Y-%m-%dT%H:%M:%SZ"
Output example: '2021-07-22T06:55:55Z' FORMAT : "%Y-%m-%dT%H:%M:%SZ"
"""
return datetime.strftime(dt.replace(tzinfo=pytz.UTC), DATETIME_FORMAT)
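
These two helpers are inverses of each other. A standalone round-trip sketch, assuming `DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"` as the docstrings state (the functions below mirror, rather than import, the connector code):

```python
# Standalone round-trip sketch of str2datetime / datetime2str as documented above.
from datetime import datetime

import pytz

DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"  # assumed, per the docstrings

def str2datetime(str_dt: str) -> datetime:
    # "2021-07-22T06:55:55Z" -> datetime(2021, 7, 22, 6, 55, 55)
    return datetime.strptime(str_dt, DATETIME_FORMAT) if str_dt else None

def datetime2str(dt: datetime) -> str:
    # datetime(2021, 7, 22, 6, 55, 55) -> "2021-07-22T06:55:55Z", rendered as UTC
    return datetime.strftime(dt.replace(tzinfo=pytz.UTC), DATETIME_FORMAT)

assert datetime2str(str2datetime("2021-07-22T06:55:55Z")) == "2021-07-22T06:55:55Z"
```
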

@@ -126,7 +126,7 @@ class IncrementalEntityStream(SourceZendeskSupportStream, ABC):

def __init__(self, start_date: str, **kwargs):
super().__init__(**kwargs)
# add the custom value for skiping of not relevant records
# add the custom value for skipping of not relevant records
self._start_date = self.str2datetime(start_date) if isinstance(start_date, str) else start_date
# Flag for marking of completed process
self._finished = False
@@ -183,7 +183,7 @@ class IncrementalExportStream(IncrementalEntityStream, ABC):
page_size = 1000

# try to save a stage after every 100 records
# this endpoint provides responces in ascending order.
# this endpoint provides responses in ascending order.
state_checkpoint_interval = 100

def __init__(self, **kwargs):
@@ -209,14 +209,14 @@ def request_params(
if not next_page_token:
current_state = stream_state.get(LAST_END_TIME_KEY)
if not current_state:
# try to search all reconds with generated_timestamp > start_time
# try to search all records with generated_timestamp > start_time
current_state = stream_state.get(self.cursor_field)
if current_state and isinstance(current_state, str) and not current_state.isdigit():
current_state = self.str2unixtime(current_state)
elif not self.last_end_time:
self.last_end_time = current_state
start_time = int(current_state or time.mktime(self._start_date.timetuple()))
# +1 because the API returns all records where generated_timestamp >= start_time

now = calendar.timegm(datetime.now().utctimetuple())
if start_time > now - 60:
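
For readers following the cursor handling in this hunk: state may arrive either as an ISO-8601 string or as a unix timestamp, and there is a guard for start times less than a minute old (the hunk is cut off right at that check). A rough, self-contained sketch of the state-to-start_time conversion (names and the handling of the too-recent case are illustrative assumptions, not the connector's exact code):

```python
# Illustrative sketch of deriving start_time from stream state, mirroring the hunk above.
import calendar
import time
from datetime import datetime
from typing import Optional

DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"  # assumed, per the docstrings earlier in this file

def str2unixtime(str_dt: str) -> int:
    # "2021-07-22T06:55:55Z" -> 1626936955 (UTC epoch seconds)
    return calendar.timegm(datetime.strptime(str_dt, DATETIME_FORMAT).utctimetuple())

def choose_start_time(stream_state: dict, cursor_field: str, default_start: datetime) -> Optional[int]:
    current_state = stream_state.get(cursor_field)
    if current_state and isinstance(current_state, str) and not current_state.isdigit():
        current_state = str2unixtime(current_state)
    start_time = int(current_state or time.mktime(default_start.timetuple()))
    now = calendar.timegm(datetime.now().utctimetuple())
    if start_time > now - 60:
        # Too close to "now"; the connector's actual behaviour here is not shown in the hunk.
        return None
    return start_time

print(choose_start_time({"generated_timestamp": "2021-07-22T06:55:55Z"}, "generated_timestamp", datetime(2021, 1, 1)))
```
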
@@ -265,8 +265,8 @@ def parse_response(
class IncrementalUnsortedStream(IncrementalEntityStream, ABC):
"""Stream for loading without sorting

Some endpoints don't provide approachs for data filtration
We can load all reconds fully and select updated data only
Some endpoints don't provide approaches for data filtration
We can load all records fully and select updated data only
"""

def __init__(self, **kwargs):
@@ -459,9 +459,9 @@ def parse_response(
response.raise_for_status()


# NOTE: all Zendesk endpoints can be splitted into several templates of data loading.
# NOTE: all Zendesk endpoints can be split into several templates of data loading.
# 1) with API built-in incremental approach
# 2) pagination and sorting mechanism
# 3) cursor pagination and sorting mechanism
# 4) without sorting but with pagination
# 5) without created_at/updated_at fields
@@ -550,7 +550,17 @@ class TicketMetrics(IncrementalUnsortedPageStream):
"""TicketMetric stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metrics/"""


# endpoints provide a pagination and sorting mechanism
class TicketMetricEvents(IncrementalExportStream):
"""TicketMetricEvents stream: https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metric_events/"""

cursor_field = "time"

def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]:
"""Need to save a cursor values as integer"""
state = super().get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record)
if state and state.get(self.cursor_field):
state[self.cursor_field] = datetime.strptime(state[self.cursor_field], DATETIME_FORMAT).timestamp()
return state
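
The `get_updated_state` override above re-parses the cursor string with `DATETIME_FORMAT` and stores it as an epoch number. A small standalone illustration of that conversion (the format string is assumed from the docstrings; note that `datetime.strptime` returns a naive datetime, so `.timestamp()` interprets it in the local timezone unless a tzinfo is attached):

```python
# Standalone illustration of the cursor conversion done in get_updated_state above.
from datetime import datetime, timezone

DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"  # assumed, per the docstrings

state = {"time": "2022-07-19T22:21:26Z"}

# As in the override above: naive datetime, so .timestamp() assumes the local timezone.
state["time"] = datetime.strptime(state["time"], DATETIME_FORMAT).timestamp()

# A UTC-explicit variant attaches tzinfo before converting to an epoch value.
utc_epoch = datetime.strptime("2022-07-19T22:21:26Z", DATETIME_FORMAT).replace(tzinfo=timezone.utc).timestamp()

print(state["time"], utc_epoch)
```
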


class Macros(IncrementalSortedPageStream):
@@ -572,7 +582,7 @@ class TicketAudits(IncrementalSortedCursorStream):
response_list_name = "audits"


# endpoints dont provide the updated_at/created_at fields
# endpoints don't provide the updated_at/created_at fields
# thus we can't implement an incremental logic for them


@@ -38,7 +38,7 @@ def config():
[
# Retry-After > 0
("Retry-After", "123", 123),
# Retry-Afte < 0
# Retry-After < 0
("Retry-After", "-123", None),
# X-Rate-Limit > 0
("X-Rate-Limit", "100", 1.2),
@@ -64,7 +64,7 @@ def test_backoff_cases(prepare_stream_args, header_name, header_value, expected)


@pytest.mark.parametrize(
"status_code,expected_comment_count,expected_expection",
"status_code,expected_comment_count,expected_exception",
[
# success
(200, 1, None),
@@ -74,7 +74,7 @@ def test_backoff_cases(prepare_stream_args, header_name, header_value, expected)
(403, 0, HTTPError),
],
)
def test_comments_not_found_ticket(prepare_stream_args, status_code, expected_comment_count, expected_expection):
def test_comments_not_found_ticket(prepare_stream_args, status_code, expected_comment_count, expected_exception):
"""Checks the case when some ticket is removed while sync of comments"""
fake_id = 12345
stream = TicketComments(**prepare_stream_args)
@@ -100,8 +100,8 @@ def test_comments_not_found_ticket(prepare_stream_args, status_code, expected_co
"id": fake_id,
},
)
if expected_expection:
with pytest.raises(expected_expection):
if expected_exception:
with pytest.raises(expected_exception):
next(comments)
else:
assert len(list(comments)) == expected_comment_count
docs/integrations/sources/zendesk-support.md (3 changes: 2 additions & 1 deletion)
@@ -19,6 +19,7 @@ This Source is capable of syncing the following core Streams:
* [Ticket Fields](https://developer.zendesk.com/rest_api/docs/support/ticket_fields)
* [Ticket Forms](https://developer.zendesk.com/rest_api/docs/support/ticket_forms)
* [Ticket Metrics](https://developer.zendesk.com/rest_api/docs/support/ticket_metrics)
* [Ticket Metric Events](https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metric_events/)
* [Group Memberships](https://developer.zendesk.com/rest_api/docs/support/group_memberships)
* [Macros](https://developer.zendesk.com/rest_api/docs/support/macros)
* [Satisfaction Ratings](https://developer.zendesk.com/rest_api/docs/support/satisfaction_ratings)
@@ -33,7 +34,6 @@ This Source is capable of syncing the following core Streams:

* [Ticket Attachments](https://developer.zendesk.com/api-reference/ticketing/tickets/ticket-attachments/)
* [Ticket Requests](https://developer.zendesk.com/api-reference/ticketing/tickets/ticket-requests/)
* [Ticket Metric Events](https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_metric_events/)
* [Ticket Activities](https://developer.zendesk.com/api-reference/ticketing/tickets/activity_stream/)
* [Ticket Skips](https://developer.zendesk.com/api-reference/ticketing/tickets/ticket_skips/)

@@ -97,6 +97,7 @@ We recommend creating a restricted, read-only key specifically for Airbyte acces

| Version | Date | Pull Request | Subject |
| :------ | :-------- | :----- | :------ |
| `0.1.7` | 2021-11-23 | [8168](https://github.com/airbytehq/airbyte/pull/8168) | Adds TicketMetricEvents |
| `0.1.6` | 2021-11-18 | [8050](https://github.com/airbytehq/airbyte/pull/8050) | Fix wrong types for schemas, add Transformer |
| `0.1.5` | 2021-10-26 | [7679](https://github.com/airbytehq/airbyte/pull/7679) | Add ticket_id and ticket_comments |
| `0.1.4` | 2021-10-26 | [7377](https://github.com/airbytehq/airbyte/pull/7377) | fix initially_assigned_at type in ticket metrics |